diff --git a/tests/get_unused_disk.yml b/tests/get_unused_disk.yml index 16dbfaf..8267bce 100644 --- a/tests/get_unused_disk.yml +++ b/tests/get_unused_disk.yml @@ -38,11 +38,28 @@ unused_disks: "{{ unused_disks_return.disks }}" when: "'Unable to find unused disk' not in unused_disks_return.disks" -- name: Exit playbook when there's not enough unused disks in the system - fail: - msg: "Unable to find enough unused disks. Exiting playbook." - when: unused_disks | d([]) | length < disks_needed | d(1) - - name: Print unused disks debug: var: unused_disks + verbosity: 2 + +- name: Try to find out why there are not enough unused disks + when: unused_disks | d([]) | length < disks_needed | d(1) + block: + - name: Print info from find_unused_disk + debug: + var: unused_disks_return.info + when: unused_disks_return.info | d([]) | length > 0 + + - name: Show disk information + shell: | + set -euxo pipefail + exec 1>&2 + lvs --all + pvs --all + vgs --all + changed_when: false + + - name: Exit playbook when there's not enough unused disks in the system + fail: + msg: "Unable to find enough unused disks. Exiting playbook." diff --git a/tests/library/find_unused_disk.py b/tests/library/find_unused_disk.py index d548a3a..0c98525 100644 --- a/tests/library/find_unused_disk.py +++ b/tests/library/find_unused_disk.py @@ -185,37 +185,56 @@ def run_module(): module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) max_size = Size(module.params["max_size"]) + info = [] for path, attrs in get_disks(module).items(): if is_ignored(path): + info.append("Disk [%s] attrs [%s] is ignored" % (path, attrs)) continue interface = module.params["with_interface"] if interface is not None and not is_device_interface(module, path, interface): + info.append( + "Disk [%s] attrs [%s] is not an interface [%s]" + % (path, attrs, interface) + ) continue if attrs["fstype"]: + info.append("Disk [%s] attrs [%s] has fstype" % (path, attrs)) continue if Size(attrs["size"]).bytes < Size(module.params["min_size"]).bytes: + info.append( + "Disk [%s] attrs [%s] size is less than requested" % (path, attrs) + ) continue if max_size.bytes > 0 and Size(attrs["size"]).bytes > max_size.bytes: + info.append( + "Disk [%s] attrs [%s] size is greater than requested" % (path, attrs) + ) continue if get_partitions(path): + info.append("Disk [%s] attrs [%s] has partitions" % (path, attrs)) continue if not no_holders(get_sys_name(path)): + info.append("Disk [%s] attrs [%s] has holders" % (path, attrs)) continue if not can_open(path): + info.append( + "Disk [%s] attrs [%s] cannot be opened exclusively" % (path, attrs) + ) continue result["disks"].append(os.path.basename(path)) if len(result["disks"]) >= module.params["max_return"]: break + result["info"] = info if not result["disks"]: result["disks"] = "Unable to find unused disk" else: diff --git a/tests/tasks/cleanup.yml b/tests/tasks/cleanup.yml new file mode 100644 index 0000000..a8cdfd3 --- /dev/null +++ b/tests/tasks/cleanup.yml @@ -0,0 +1,33 @@ +# SPDX-License-Identifier: MIT +# Input: +# - test_disk_min_size e.g. "1g" +# - test_disk_count e.g. 
10 +# - test_storage_pools - the list of pools & volumes to create +--- +- name: Remove storage volumes + include_role: + name: fedora.linux_system_roles.storage + vars: + storage_safe_mode: false + storage_pools: | + {% set cleanup_pools = [] %} + {% for pool in test_storage_pools %} + {% set clean_pool = {"name": pool["name"], + "disks": pool["disks"], "state": "absent"} %} + {% set vols = [] %} + {% for vol in pool.get("volumes", []) %} + {% set clean_vol = {"name": vol["name"], "state": "absent"} %} + {% set _ = vols.append(clean_vol) %} + {% endfor %} + {% if vols %} + {% set _ = clean_pool.__setitem__("volumes", vols) %} + {% endif %} + {% set _ = cleanup_pools.append(clean_pool) %} + {% endfor %} + {{ cleanup_pools }} + +- name: Verify that pools/volumes used in test are removed + include_tasks: get_unused_disk.yml + vars: + min_size: "{{ test_disk_min_size }}" + min_return: "{{ test_disk_count }}" diff --git a/tests/tasks/setup.yml b/tests/tasks/setup.yml new file mode 100644 index 0000000..95fa501 --- /dev/null +++ b/tests/tasks/setup.yml @@ -0,0 +1,34 @@ +# SPDX-License-Identifier: MIT +# Input: +# - test_disk_min_size e.g. "1g" +# - test_disk_count e.g. 10 +# - test_storage_pools - the list of pools & volumes to create +# Output: +# - unused_disks e.g. ["sda", "sdb", ..] +# - test_mnt_parent e.g. /mnt or /var/mnt +--- +- name: Check if system is ostree + stat: + path: /run/ostree-booted + register: __ostree_booted_stat + +- name: Set mount parent + set_fact: + test_mnt_parent: "{{ __ostree_booted_stat.stat.exists | + ternary('/var/mnt', '/mnt') }}" + +- name: Run the storage role install base packages + include_role: + name: fedora.linux_system_roles.storage + +- name: Get unused disks + include_tasks: get_unused_disk.yml + vars: + min_size: "{{ test_disk_min_size }}" + min_return: "{{ test_disk_count }}" + +- name: Create LVM logical volumes under volume groups + include_role: + name: fedora.linux_system_roles.storage + vars: + storage_pools: "{{ test_storage_pools }}" diff --git a/tests/tests_basic.yml b/tests/tests_basic.yml index c109016..de194d0 100644 --- a/tests/tests_basic.yml +++ b/tests/tests_basic.yml @@ -1,58 +1,40 @@ --- - name: Basic snapshot test hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "15%" + - name: lv2 + size: "50%" + - name: test_vg2 + disks: "{{ range(3, 6) | map('extract', unused_disks) | list }}" + volumes: + - name: lv3 + size: "10%" + - name: lv4 + size: "20%" + - name: test_vg3 + disks: "{{ range(6, 10) | map('extract', unused_disks) | list }}" + volumes: + - name: lv5 + size: "30%" + - name: lv6 + size: "25%" + - name: lv7 + size: "10%" + - name: lv8 + size: "10%" tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk lists - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - disk_list_2: "{{ range(3, 6) | map('extract', unused_disks) | - list }}" - disk_list_3: "{{ range(6, 10) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: 
"15%" - - name: lv2 - size: "50%" - - name: test_vg2 - disks: "{{ disk_list_2 }}" - volumes: - - name: lv3 - size: "10%" - - name: lv4 - size: "20%" - - name: test_vg3 - disks: "{{ disk_list_3 }}" - volumes: - - name: lv5 - size: "30%" - - name: lv6 - size: "25%" - - name: lv7 - size: "10%" - - name: lv8 - size: "10%" + - name: Setup + include_tasks: tasks/setup.yml - name: Run the snapshot role to create snapshot LVs include_role: @@ -87,37 +69,6 @@ snapshot_lvm_verify_only: true snapshot_lvm_action: remove always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent - - name: lv2 - state: absent - - name: test_vg2 - disks: "{{ disk_list_2 }}" - state: absent - volumes: - - name: lv3 - state: absent - - name: lv4 - state: absent - - name: test_vg3 - disks: "{{ disk_list_3 }}" - state: absent - volumes: - - name: lv5 - state: absent - - name: lv6 - state: absent - - name: lv7 - state: absent - - name: lv8 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_check_no_lv_fail.yml b/tests/tests_check_no_lv_fail.yml index e170210..74c2d30 100644 --- a/tests/tests_check_no_lv_fail.yml +++ b/tests/tests_check_no_lv_fail.yml @@ -1,34 +1,20 @@ --- - name: Verify the check commmand fails when source LV doesn't exist hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "50%" tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk list - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "50%" + - name: Setup + include_tasks: tasks/setup.yml - name: Test failure of check with verify only set for incorrect LV include_tasks: verify-role-failed.yml @@ -43,15 +29,6 @@ snapshot_lvm_verify_only: true snapshot_lvm_action: check always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_check_no_vg_fail.yml b/tests/tests_check_no_vg_fail.yml index b807513..2bdda88 100644 --- a/tests/tests_check_no_vg_fail.yml +++ b/tests/tests_check_no_vg_fail.yml @@ -1,34 +1,20 @@ --- - name: Verify the check commmand fails when source VG doesn't exist hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "50%" tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: 
get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk list - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "50%" + - name: Setup + include_tasks: tasks/setup.yml - name: Test failure of check for incorrect VG include_tasks: verify-role-failed.yml @@ -41,15 +27,6 @@ snapshot_lvm_verify_only: true snapshot_lvm_action: check always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_extend_basic.yml b/tests/tests_extend_basic.yml index e3f2e9c..5fc8f7f 100644 --- a/tests/tests_extend_basic.yml +++ b/tests/tests_extend_basic.yml @@ -1,58 +1,40 @@ --- - name: Basic snapshot test hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "10%" + - name: lv2 + size: "10%" + - name: test_vg2 + disks: "{{ range(3, 6) | map('extract', unused_disks) | list }}" + volumes: + - name: lv3 + size: "10%" + - name: lv4 + size: "10%" + - name: test_vg3 + disks: "{{ range(6, 10) | map('extract', unused_disks) | list }}" + volumes: + - name: lv5 + size: "10%" + - name: lv6 + size: "10%" + - name: lv7 + size: "10%" + - name: lv8 + size: "10%" tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk lists - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - disk_list_2: "{{ range(3, 6) | map('extract', unused_disks) | - list }}" - disk_list_3: "{{ range(6, 10) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "10%" - - name: lv2 - size: "10%" - - name: test_vg2 - disks: "{{ disk_list_2 }}" - volumes: - - name: lv3 - size: "10%" - - name: lv4 - size: "10%" - - name: test_vg3 - disks: "{{ disk_list_3 }}" - volumes: - - name: lv5 - size: "10%" - - name: lv6 - size: "10%" - - name: lv7 - size: "10%" - - name: lv8 - size: "10%" + - name: Setup + include_tasks: tasks/setup.yml - name: Run the snapshot role to create snapshot LVs include_role: @@ -91,37 +73,6 @@ snapshot_lvm_verify_only: true snapshot_lvm_action: extend always: - - name: Clean up storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent - - name: lv2 - state: absent - - name: test_vg2 - disks: "{{ disk_list_2 }}" - state: absent - volumes: - - name: lv3 - state: absent - - name: lv4 - state: absent - - name: test_vg3 - disks: "{{ disk_list_3 }}" - state: absent - volumes: - - name: lv5 - state: absent 
- - name: lv6 - state: absent - - name: lv7 - state: absent - - name: lv8 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_list.yml b/tests/tests_list.yml index 99270dc..5595613 100644 --- a/tests/tests_list.yml +++ b/tests/tests_list.yml @@ -1,58 +1,40 @@ --- - name: Basic list snapshot test hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "15%" + - name: lv2 + size: "50%" + - name: test_vg2 + disks: "{{ range(3, 6) | map('extract', unused_disks) | list }}" + volumes: + - name: lv3 + size: "10%" + - name: lv4 + size: "20%" + - name: test_vg3 + disks: "{{ range(6, 10) | map('extract', unused_disks) | list }}" + volumes: + - name: lv5 + size: "30%" + - name: lv6 + size: "25%" + - name: lv7 + size: "10%" + - name: lv8 + size: "10%" tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk lists - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - disk_list_2: "{{ range(3, 6) | map('extract', unused_disks) | - list }}" - disk_list_3: "{{ range(6, 10) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "15%" - - name: lv2 - size: "50%" - - name: test_vg2 - disks: "{{ disk_list_2 }}" - volumes: - - name: lv3 - size: "10%" - - name: lv4 - size: "20%" - - name: test_vg3 - disks: "{{ disk_list_3 }}" - volumes: - - name: lv5 - size: "30%" - - name: lv6 - size: "25%" - - name: lv7 - size: "10%" - - name: lv8 - size: "10%" + - name: Setup + include_tasks: tasks/setup.yml - name: Run the snapshot role to create snapshot LVs include_role: @@ -94,37 +76,6 @@ snapshot_lvm_verify_only: true snapshot_lvm_action: remove always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent - - name: lv2 - state: absent - - name: test_vg2 - disks: "{{ disk_list_2 }}" - state: absent - volumes: - - name: lv3 - state: absent - - name: lv4 - state: absent - - name: test_vg3 - disks: "{{ disk_list_3 }}" - state: absent - volumes: - - name: lv5 - state: absent - - name: lv6 - state: absent - - name: lv7 - state: absent - - name: lv8 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_mount.yml b/tests/tests_mount.yml index 517d353..02768db 100644 --- a/tests/tests_mount.yml +++ b/tests/tests_mount.yml @@ -1,66 +1,48 @@ --- - name: Basic mount snapshot test hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "15%" + fs_type: xfs + - name: lv2 + size: "50%" + fs_type: xfs + - name: test_vg2 + disks: "{{ range(3, 6) | map('extract', unused_disks) | list }}" + volumes: + - name: lv3 + size: "10%" + fs_type: xfs + - name: lv4 + size: "20%" + 
fs_type: xfs + - name: test_vg3 + disks: "{{ range(6, 10) | map('extract', unused_disks) | list }}" + volumes: + - name: lv5 + size: "30%" + fs_type: xfs + - name: lv6 + size: "25%" + fs_type: xfs + - name: lv7 + size: "10%" + fs_type: xfs + - name: lv8 + size: "10%" + fs_type: xfs tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk lists - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - disk_list_2: "{{ range(3, 6) | map('extract', unused_disks) | - list }}" - disk_list_3: "{{ range(6, 10) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "15%" - fs_type: xfs - - name: lv2 - size: "50%" - fs_type: xfs - - name: test_vg2 - disks: "{{ disk_list_2 }}" - volumes: - - name: lv3 - size: "10%" - fs_type: xfs - - name: lv4 - size: "20%" - fs_type: xfs - - name: test_vg3 - disks: "{{ disk_list_3 }}" - volumes: - - name: lv5 - size: "30%" - fs_type: xfs - - name: lv6 - size: "25%" - fs_type: xfs - - name: lv7 - size: "10%" - fs_type: xfs - - name: lv8 - size: "10%" - fs_type: xfs + - name: Setup + include_tasks: tasks/setup.yml - name: Run the snapshot role to create snapshot LVs include_role: @@ -71,16 +53,6 @@ snapshot_lvm_snapset_name: snapset1 snapshot_lvm_action: snapshot - - name: Check if system is ostree - stat: - path: /run/ostree-booted - register: __ostree_booted_stat - - - name: Set mount point - set_fact: - __mnt: "{{ __ostree_booted_stat.stat.exists | - ternary('/var/mnt', '/mnt') }}" - - name: Verify the snapshot LVs are created include_role: name: linux-system-roles.snapshot @@ -98,7 +70,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv1 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv1_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv1_mp' }}" snapshot_lvm_mountpoint_create: true - name: Mount the snapshot for lv2 @@ -109,7 +81,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv2 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv2_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv2_mp' }}" snapshot_lvm_mountpoint_create: true - name: Mount the snapshot for lv7 @@ -120,7 +92,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg3 snapshot_lvm_lv: lv7 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv7_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv7_mp' }}" snapshot_lvm_mountpoint_create: true - name: Mount the origin for lv6 @@ -131,7 +103,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg3 snapshot_lvm_lv: lv6 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv6_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv6_mp' }}" snapshot_lvm_mountpoint_create: true snapshot_lvm_mount_origin: true @@ -143,7 +115,7 @@ snapshot_lvm_action: umount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv1 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv1_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv1_mp' }}" - name: Umount the snapshot for lv2 include_role: @@ -153,7 +125,7 @@ snapshot_lvm_action: umount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv2 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv2_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent 
~ '/lv2_mp' }}" - name: Umount the snapshot for lv7 include_role: @@ -163,15 +135,14 @@ snapshot_lvm_action: umount snapshot_lvm_vg: test_vg3 snapshot_lvm_lv: lv7 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv7_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv7_mp' }}" - name: Umount the origin for lv6 include_role: name: linux-system-roles.snapshot vars: snapshot_lvm_action: umount - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv6_mp' }}" - + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv6_mp' }}" - name: Run the snapshot role remove the snapshot LVs include_role: @@ -188,37 +159,6 @@ snapshot_lvm_verify_only: true snapshot_lvm_action: remove always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent - - name: lv2 - state: absent - - name: test_vg2 - disks: "{{ disk_list_2 }}" - state: absent - volumes: - - name: lv3 - state: absent - - name: lv4 - state: absent - - name: test_vg3 - disks: "{{ disk_list_3 }}" - state: absent - volumes: - - name: lv5 - state: absent - - name: lv6 - state: absent - - name: lv7 - state: absent - - name: lv8 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_mount_no_vg_fail.yml b/tests/tests_mount_no_vg_fail.yml index 786c55d..0cb958b 100644 --- a/tests/tests_mount_no_vg_fail.yml +++ b/tests/tests_mount_no_vg_fail.yml @@ -1,35 +1,21 @@ --- - name: Verify snapshot mount action fails VG doesn't exist hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "50%" + fs_type: xfs tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk list - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "50%" - fs_type: xfs + - name: Setup + include_tasks: tasks/setup.yml - name: Create snapshot for LV include_role: @@ -40,16 +26,6 @@ snapshot_lvm_snapset_name: snapset1 snapshot_lvm_action: snapshot - - name: Check if system is ostree - stat: - path: /run/ostree-booted - register: __ostree_booted_stat - - - name: Set mount point - set_fact: - __mnt: "{{ __ostree_booted_stat.stat.exists | - ternary('/var/mnt', '/mnt') }}" - - name: Test failure of verifying wrong mount include_tasks: verify-role-failed.yml vars: @@ -60,7 +36,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: wrong_vg snapshot_lvm_lv: lv1 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv1_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv1_mp' }}" snapshot_lvm_mountpoint_create: true - name: Remove the snapshot LVs @@ -80,15 +56,6 @@ snapshot_lvm_action: remove always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent + - name: Cleanup 
+ include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_mount_verify.yml b/tests/tests_mount_verify.yml index 1951b0a..6bf42af 100644 --- a/tests/tests_mount_verify.yml +++ b/tests/tests_mount_verify.yml @@ -1,66 +1,48 @@ --- - name: Basic mount verify snapshot test hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "15%" + fs_type: xfs + - name: lv2 + size: "50%" + fs_type: xfs + - name: test_vg2 + disks: "{{ range(3, 6) | map('extract', unused_disks) | list }}" + volumes: + - name: lv3 + size: "10%" + fs_type: xfs + - name: lv4 + size: "20%" + fs_type: xfs + - name: test_vg3 + disks: "{{ range(6, 10) | map('extract', unused_disks) | list }}" + volumes: + - name: lv5 + size: "30%" + fs_type: xfs + - name: lv6 + size: "25%" + fs_type: xfs + - name: lv7 + size: "10%" + fs_type: xfs + - name: lv8 + size: "10%" + fs_type: xfs tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk lists - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - disk_list_2: "{{ range(3, 6) | map('extract', unused_disks) | - list }}" - disk_list_3: "{{ range(6, 10) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "15%" - fs_type: xfs - - name: lv2 - size: "50%" - fs_type: xfs - - name: test_vg2 - disks: "{{ disk_list_2 }}" - volumes: - - name: lv3 - size: "10%" - fs_type: xfs - - name: lv4 - size: "20%" - fs_type: xfs - - name: test_vg3 - disks: "{{ disk_list_3 }}" - volumes: - - name: lv5 - size: "30%" - fs_type: xfs - - name: lv6 - size: "25%" - fs_type: xfs - - name: lv7 - size: "10%" - fs_type: xfs - - name: lv8 - size: "10%" - fs_type: xfs + - name: Setup + include_tasks: tasks/setup.yml - name: Run the snapshot role to create snapshot LVs include_role: @@ -71,16 +53,6 @@ snapshot_lvm_snapset_name: snapset1 snapshot_lvm_action: snapshot - - name: Check if system is ostree - stat: - path: /run/ostree-booted - register: __ostree_booted_stat - - - name: Set mount point - set_fact: - __mnt: "{{ __ostree_booted_stat.stat.exists | - ternary('/var/mnt', '/mnt') }}" - - name: Verify the snapshot LVs are created include_role: name: linux-system-roles.snapshot @@ -98,7 +70,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv1 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv1_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv1_mp' }}" snapshot_lvm_mountpoint_create: true - name: Mount the snapshot for lv2 @@ -109,7 +81,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv2 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv2_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv2_mp' }}" snapshot_lvm_mountpoint_create: true - name: Mount the snapshot for lv7 @@ -120,7 +92,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg3 snapshot_lvm_lv: lv7 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv7_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv7_mp' }}" snapshot_lvm_mountpoint_create: true - name: Mount the origin for 
lv6 @@ -131,7 +103,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg3 snapshot_lvm_lv: lv6 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv6_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv6_mp' }}" snapshot_lvm_mountpoint_create: true snapshot_lvm_mount_origin: true @@ -143,10 +115,9 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv1 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv1_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv1_mp' }}" snapshot_lvm_verify_only: true - - name: Verify snapshot is mounted for lv2 include_role: name: linux-system-roles.snapshot @@ -155,10 +126,9 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv2 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv2_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv2_mp' }}" snapshot_lvm_verify_only: true - - name: Verify snapshot is mounted for lv7 include_role: name: linux-system-roles.snapshot @@ -167,7 +137,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg3 snapshot_lvm_lv: lv7 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv7_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv7_mp' }}" snapshot_lvm_verify_only: true - name: Verify origin is mounted lv6 @@ -178,7 +148,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg3 snapshot_lvm_lv: lv6 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv6_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv6_mp' }}" snapshot_lvm_mountpoint_create: true snapshot_lvm_mount_origin: true snapshot_lvm_verify_only: true @@ -191,7 +161,7 @@ snapshot_lvm_action: umount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv1 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv1_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv1_mp' }}" - name: Umount the snapshot for lv2 include_role: @@ -201,7 +171,7 @@ snapshot_lvm_action: umount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv2 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv2_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv2_mp' }}" - name: Umount the snapshot for lv7 include_role: @@ -211,14 +181,14 @@ snapshot_lvm_action: umount snapshot_lvm_vg: test_vg3 snapshot_lvm_lv: lv7 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv7_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv7_mp' }}" - name: Umount the origin for lv6 include_role: name: linux-system-roles.snapshot vars: snapshot_lvm_action: umount - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv6_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv6_mp' }}" - name: Verify umount of the for lv1 include_role: @@ -228,7 +198,7 @@ snapshot_lvm_action: umount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv1 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv1_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv1_mp' }}" snapshot_lvm_verify_only: true - name: Verify umount of the for lv2 @@ -239,7 +209,7 @@ snapshot_lvm_action: umount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv2 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv2_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv2_mp' }}" snapshot_lvm_verify_only: true - name: Verify umount of the for lv7 @@ -250,7 +220,7 @@ snapshot_lvm_action: umount snapshot_lvm_vg: test_vg3 snapshot_lvm_lv: lv7 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv7_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv7_mp' }}" snapshot_lvm_verify_only: true - name: Verify umount of the origin for lv6 @@ -261,7 +231,7 @@ snapshot_lvm_action: umount snapshot_lvm_vg: test_vg3 snapshot_lvm_lv: lv6 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv6_mp' }}" + 
snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv6_mp' }}" snapshot_lvm_verify_only: true - name: Run the snapshot role remove the snapshot LVs @@ -279,37 +249,6 @@ snapshot_lvm_verify_only: true snapshot_lvm_action: remove always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent - - name: lv2 - state: absent - - name: test_vg2 - disks: "{{ disk_list_2 }}" - state: absent - volumes: - - name: lv3 - state: absent - - name: lv4 - state: absent - - name: test_vg3 - disks: "{{ disk_list_3 }}" - state: absent - volumes: - - name: lv5 - state: absent - - name: lv6 - state: absent - - name: lv7 - state: absent - - name: lv8 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_mount_verify_fail.yml b/tests/tests_mount_verify_fail.yml index 4ae3ce0..f2967c4 100644 --- a/tests/tests_mount_verify_fail.yml +++ b/tests/tests_mount_verify_fail.yml @@ -1,35 +1,21 @@ --- - name: Verify mount action fails with wrong mount point hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "50%" + fs_type: xfs tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk list - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "50%" - fs_type: xfs + - name: Setup + include_tasks: tasks/setup.yml - name: Create snapshot for LV include_role: @@ -40,16 +26,6 @@ snapshot_lvm_snapset_name: snapset1 snapshot_lvm_action: snapshot - - name: Check if system is ostree - stat: - path: /run/ostree-booted - register: __ostree_booted_stat - - - name: Set mount point - set_fact: - __mnt: "{{ __ostree_booted_stat.stat.exists | - ternary('/var/mnt', '/mnt') }}" - - name: Mount the snapshot for LV include_role: name: linux-system-roles.snapshot @@ -58,7 +34,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv1 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv1_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv1_mp' }}" snapshot_lvm_mountpoint_create: true - name: Verify snapshot is mounted for lv1 @@ -69,7 +45,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv1 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv1_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv1_mp' }}" snapshot_lvm_verify_only: true - name: Test failure of verifying wrong mount @@ -82,7 +58,8 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv1 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/wrong_mountpoint' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ + '/wrong_mountpoint' }}" snapshot_lvm_verify_only: true - name: Umount the snapshot for lv1 @@ -93,7 +70,7 @@ snapshot_lvm_action: umount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv1 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv1_mp' }}" + 
snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv1_mp' }}" - name: Remove the snapshot LVs include_role: @@ -111,15 +88,6 @@ snapshot_lvm_action: remove always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_multi_snapsets.yml b/tests/tests_multi_snapsets.yml index 9962916..66b59e5 100644 --- a/tests/tests_multi_snapsets.yml +++ b/tests/tests_multi_snapsets.yml @@ -1,58 +1,40 @@ --- - name: Basic snapshot test hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "15%" + - name: lv2 + size: "50%" + - name: test_vg2 + disks: "{{ range(3, 6) | map('extract', unused_disks) | list }}" + volumes: + - name: lv3 + size: "10%" + - name: lv4 + size: "20%" + - name: test_vg3 + disks: "{{ range(6, 10) | map('extract', unused_disks) | list }}" + volumes: + - name: lv5 + size: "30%" + - name: lv6 + size: "25%" + - name: lv7 + size: "10%" + - name: lv8 + size: "10%" tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk lists - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - disk_list_2: "{{ range(3, 6) | map('extract', unused_disks) | - list }}" - disk_list_3: "{{ range(6, 10) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "15%" - - name: lv2 - size: "50%" - - name: test_vg2 - disks: "{{ disk_list_2 }}" - volumes: - - name: lv3 - size: "10%" - - name: lv4 - size: "20%" - - name: test_vg3 - disks: "{{ disk_list_3 }}" - volumes: - - name: lv5 - size: "30%" - - name: lv6 - size: "25%" - - name: lv7 - size: "10%" - - name: lv8 - size: "10%" + - name: Setup + include_tasks: tasks/setup.yml - name: Run the snapshot role to create snapshot LVs for snapset1 include_role: @@ -120,37 +102,6 @@ snapshot_lvm_verify_only: true snapshot_lvm_action: remove always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent - - name: lv2 - state: absent - - name: test_vg2 - disks: "{{ disk_list_2 }}" - state: absent - volumes: - - name: lv3 - state: absent - - name: lv4 - state: absent - - name: test_vg3 - disks: "{{ disk_list_3 }}" - state: absent - volumes: - - name: lv5 - state: absent - - name: lv6 - state: absent - - name: lv7 - state: absent - - name: lv8 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_no_space_fail.yml b/tests/tests_no_space_fail.yml index e36c654..32c278f 100644 --- a/tests/tests_no_space_fail.yml +++ b/tests/tests_no_space_fail.yml @@ -1,34 +1,20 @@ --- - name: Verify snapshot action fails if no space is available hosts: all + 
vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "100%" tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk list - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "100%" + - name: Setup + include_tasks: tasks/setup.yml - name: Test failure of creating snapshot include_tasks: verify-role-failed.yml @@ -41,17 +27,6 @@ snapshot_lvm_snapset_name: snapset1 snapshot_lvm_action: snapshot always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent - - name: lv2 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_revert_basic.yml b/tests/tests_revert_basic.yml index faf8643..dd53f26 100644 --- a/tests/tests_revert_basic.yml +++ b/tests/tests_revert_basic.yml @@ -1,58 +1,40 @@ --- - name: Basic snapshot test hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "15%" + - name: lv2 + size: "50%" + - name: test_vg2 + disks: "{{ range(3, 6) | map('extract', unused_disks) | list }}" + volumes: + - name: lv3 + size: "10%" + - name: lv4 + size: "20%" + - name: test_vg3 + disks: "{{ range(6, 10) | map('extract', unused_disks) | list }}" + volumes: + - name: lv5 + size: "30%" + - name: lv6 + size: "25%" + - name: lv7 + size: "10%" + - name: lv8 + size: "10%" tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk lists - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - disk_list_2: "{{ range(3, 6) | map('extract', unused_disks) | - list }}" - disk_list_3: "{{ range(6, 10) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "15%" - - name: lv2 - size: "50%" - - name: test_vg2 - disks: "{{ disk_list_2 }}" - volumes: - - name: lv3 - size: "10%" - - name: lv4 - size: "20%" - - name: test_vg3 - disks: "{{ disk_list_3 }}" - volumes: - - name: lv5 - size: "30%" - - name: lv6 - size: "25%" - - name: lv7 - size: "10%" - - name: lv8 - size: "10%" + - name: Setup + include_tasks: tasks/setup.yml - name: Run the snapshot role to create snapshot LVs include_role: @@ -89,37 +71,6 @@ snapshot_lvm_verify_only: true snapshot_lvm_action: revert always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - 
vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent - - name: lv2 - state: absent - - name: test_vg2 - disks: "{{ disk_list_2 }}" - state: absent - volumes: - - name: lv3 - state: absent - - name: lv4 - state: absent - - name: test_vg3 - disks: "{{ disk_list_3 }}" - state: absent - volumes: - - name: lv5 - state: absent - - name: lv6 - state: absent - - name: lv7 - state: absent - - name: lv8 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_set_basic.yml b/tests/tests_set_basic.yml index 2d45bcb..c813a87 100644 --- a/tests/tests_set_basic.yml +++ b/tests/tests_set_basic.yml @@ -2,6 +2,34 @@ - name: Snapshot a set of logical volumes across different volume groups hosts: all vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "15%" + - name: lv2 + size: "50%" + - name: test_vg2 + disks: "{{ range(3, 6) | map('extract', unused_disks) | list }}" + volumes: + - name: lv3 + size: "10%" + - name: lv4 + size: "20%" + - name: test_vg3 + disks: "{{ range(6, 10) | map('extract', unused_disks) | list }}" + volumes: + - name: lv5 + size: "30%" + - name: lv6 + size: "25%" + - name: lv7 + size: "10%" + - name: lv8 + size: "10%" snapshot_test_set: name: snapset1 volumes: @@ -24,55 +52,8 @@ tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk lists - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - disk_list_2: "{{ range(3, 6) | map('extract', unused_disks) | - list }}" - disk_list_3: "{{ range(6, 10) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "15%" - - name: lv2 - size: "50%" - - name: test_vg2 - disks: "{{ disk_list_2 }}" - volumes: - - name: lv3 - size: "10%" - - name: lv4 - size: "20%" - - name: test_vg3 - disks: "{{ disk_list_3 }}" - volumes: - - name: lv5 - size: "30%" - - name: lv6 - size: "25%" - - name: lv7 - size: "10%" - - name: lv8 - size: "10%" + - name: Setup + include_tasks: tasks/setup.yml - name: Run the snapshot role to create snapshot set of LVs include_role: @@ -104,37 +85,6 @@ snapshot_lvm_set: "{{ snapshot_test_set }}" snapshot_lvm_verify_only: true always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent - - name: lv2 - state: absent - - name: test_vg2 - disks: "{{ disk_list_2 }}" - state: absent - volumes: - - name: lv3 - state: absent - - name: lv4 - state: absent - - name: test_vg3 - disks: "{{ disk_list_3 }}" - state: absent - volumes: - - name: lv5 - state: absent - - name: lv6 - state: absent - - name: lv7 - state: absent - - name: lv8 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_set_check_no_lv_fail.yml 
b/tests/tests_set_check_no_lv_fail.yml index 43b1018..492af00 100644 --- a/tests/tests_set_check_no_lv_fail.yml +++ b/tests/tests_set_check_no_lv_fail.yml @@ -1,34 +1,20 @@ --- - name: Verify the check commmand fails when source LV doesn't exist hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "50%" tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk list - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume group - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "50%" + - name: Setup + include_tasks: tasks/setup.yml - name: Test failure of check with verify set for incorrect LV include_tasks: verify-role-failed.yml @@ -49,15 +35,6 @@ lv: xxxxx percent_space_required: 20 always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_set_check_no_vg_fail.yml b/tests/tests_set_check_no_vg_fail.yml index 9bc1729..d8b4a37 100644 --- a/tests/tests_set_check_no_vg_fail.yml +++ b/tests/tests_set_check_no_vg_fail.yml @@ -1,34 +1,20 @@ --- - name: Verify the check commmand fails when source VG doesn't exist hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "50%" tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk list - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "50%" + - name: Setup + include_tasks: tasks/setup.yml - name: Test failure of check for incorrect VG include_tasks: verify-role-failed.yml @@ -47,15 +33,6 @@ lv: lv1 percent_space_required: 20 always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_set_extend.yml b/tests/tests_set_extend.yml index 80dbe05..f13a522 100644 --- a/tests/tests_set_extend.yml +++ b/tests/tests_set_extend.yml @@ -2,6 +2,34 @@ - name: Revert snapshots of logical volumes across different volume groups hosts: all vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: 
+ - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "15%" + - name: lv2 + size: "50%" + - name: test_vg2 + disks: "{{ range(3, 6) | map('extract', unused_disks) | list }}" + volumes: + - name: lv3 + size: "10%" + - name: lv4 + size: "20%" + - name: test_vg3 + disks: "{{ range(6, 10) | map('extract', unused_disks) | list }}" + volumes: + - name: lv5 + size: "30%" + - name: lv6 + size: "25%" + - name: lv7 + size: "10%" + - name: lv8 + size: "10%" snapshot_test_set: name: snapset1 volumes: @@ -21,7 +49,6 @@ vg: test_vg3 lv: lv7 percent_space_required: 15 - snapshot_extend_set: name: snapset1 volumes: @@ -44,55 +71,8 @@ tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk lists - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - disk_list_2: "{{ range(3, 6) | map('extract', unused_disks) | - list }}" - disk_list_3: "{{ range(6, 10) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "15%" - - name: lv2 - size: "50%" - - name: test_vg2 - disks: "{{ disk_list_2 }}" - volumes: - - name: lv3 - size: "10%" - - name: lv4 - size: "20%" - - name: test_vg3 - disks: "{{ disk_list_3 }}" - volumes: - - name: lv5 - size: "30%" - - name: lv6 - size: "25%" - - name: lv7 - size: "10%" - - name: lv8 - size: "10%" + - name: Setup + include_tasks: tasks/setup.yml - name: Run the snapshot role to create a snapshot set of LVs include_role: @@ -125,37 +105,6 @@ snapshot_lvm_set: "{{ snapshot_extend_set }}" always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent - - name: lv2 - state: absent - - name: test_vg2 - disks: "{{ disk_list_2 }}" - state: absent - volumes: - - name: lv3 - state: absent - - name: lv4 - state: absent - - name: test_vg3 - disks: "{{ disk_list_3 }}" - state: absent - volumes: - - name: lv5 - state: absent - - name: lv6 - state: absent - - name: lv7 - state: absent - - name: lv8 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_set_extend_verify_fail.yml b/tests/tests_set_extend_verify_fail.yml index 5096e1a..504ac5b 100644 --- a/tests/tests_set_extend_verify_fail.yml +++ b/tests/tests_set_extend_verify_fail.yml @@ -2,6 +2,14 @@ - name: Verify the extend verify commmand fails when space too low hosts: all vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "50%" snapshot_test_set: name: snapset1 volumes: @@ -19,31 +27,8 @@ tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk list - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - 
list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "50%" + - name: Setup + include_tasks: tasks/setup.yml - name: Run the snapshot role to create snapshot set of LVs include_role: @@ -72,15 +57,6 @@ __snapshot_lvm_set: "{{ snapshot_test_verify_set }}" always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_set_mount.yml b/tests/tests_set_mount.yml index 1c08eca..c51d0b7 100644 --- a/tests/tests_set_mount.yml +++ b/tests/tests_set_mount.yml @@ -2,96 +2,67 @@ - name: Mount snapshots of logical volumes across different volume groups hosts: all vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "15%" + - name: lv2 + size: "50%" + - name: test_vg2 + disks: "{{ range(3, 6) | map('extract', unused_disks) | list }}" + volumes: + - name: lv3 + size: "10%" + - name: lv4 + size: "20%" + - name: test_vg3 + disks: "{{ range(6, 10) | map('extract', unused_disks) | list }}" + volumes: + - name: lv5 + size: "30%" + - name: lv6 + size: "25%" + - name: lv7 + size: "10%" + - name: lv8 + size: "10%" snapshot_test_set: name: snapset1 volumes: - name: snapshot VG1 LV1 vg: test_vg1 lv: lv1 - mountpoint: "{{ __mnt ~ '/lv1_mp' }}" + mountpoint: "{{ test_mnt_parent ~ '/lv1_mp' }}" percent_space_required: 15 mountpoint_create: true - name: snapshot VG2 LV3 vg: test_vg2 lv: lv3 - mountpoint: "{{ __mnt ~ '/lv3_mp' }}" + mountpoint: "{{ test_mnt_parent ~ '/lv3_mp' }}" percent_space_required: 15 mountpoint_create: true - name: snapshot VG2 LV4 vg: test_vg2 lv: lv4 - mountpoint: "{{ __mnt ~ '/lv4_mp' }}" + mountpoint: "{{ test_mnt_parent ~ '/lv4_mp' }}" percent_space_required: 15 mountpoint_create: true - name: snapshot VG3 LV7 vg: test_vg3 lv: lv7 - mountpoint: "{{ __mnt ~ '/lv7_mp' }}" + mountpoint: "{{ test_mnt_parent ~ '/lv7_mp' }}" percent_space_required: 15 mountpoint_create: true tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk lists - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - disk_list_2: "{{ range(3, 6) | map('extract', unused_disks) | - list }}" - disk_list_3: "{{ range(6, 10) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "15%" - - name: lv2 - size: "50%" - - name: test_vg2 - disks: "{{ disk_list_2 }}" - volumes: - - name: lv3 - size: "10%" - - name: lv4 - size: "20%" - - name: test_vg3 - disks: "{{ disk_list_3 }}" - volumes: - - name: lv5 - size: "30%" - - name: lv6 - size: "25%" - - name: lv7 - size: "10%" - - name: lv8 - size: "10%" - - - name: Check if system is ostree - stat: - path: /run/ostree-booted - register: 
__ostree_booted_stat - - - name: Set mount point - set_fact: - __mnt: "{{ __ostree_booted_stat.stat.exists | - ternary('/var/mnt', '/mnt') }}" + - name: Setup + include_tasks: tasks/setup.yml - name: Run the snapshot role to create a snapshot set of LVs include_role: @@ -125,37 +96,6 @@ snapshot_lvm_set: "{{ snapshot_test_set }}" always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent - - name: lv2 - state: absent - - name: test_vg2 - disks: "{{ disk_list_2 }}" - state: absent - volumes: - - name: lv3 - state: absent - - name: lv4 - state: absent - - name: test_vg3 - disks: "{{ disk_list_3 }}" - state: absent - volumes: - - name: lv5 - state: absent - - name: lv6 - state: absent - - name: lv7 - state: absent - - name: lv8 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_set_mount_verify_fail.yml b/tests/tests_set_mount_verify_fail.yml index 5096e1a..504ac5b 100644 --- a/tests/tests_set_mount_verify_fail.yml +++ b/tests/tests_set_mount_verify_fail.yml @@ -2,6 +2,14 @@ - name: Verify the extend verify commmand fails when space too low hosts: all vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "50%" snapshot_test_set: name: snapset1 volumes: @@ -19,31 +27,8 @@ tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk list - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "50%" + - name: Setup + include_tasks: tasks/setup.yml - name: Run the snapshot role to create snapshot set of LVs include_role: @@ -72,15 +57,6 @@ __snapshot_lvm_set: "{{ snapshot_test_verify_set }}" always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_set_revert.yml b/tests/tests_set_revert.yml index b069f08..8524b6c 100644 --- a/tests/tests_set_revert.yml +++ b/tests/tests_set_revert.yml @@ -2,6 +2,34 @@ - name: Revert snapshots of logical volumes across different volume groups hosts: all vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "15%" + - name: lv2 + size: "50%" + - name: test_vg2 + disks: "{{ range(3, 6) | map('extract', unused_disks) | list }}" + volumes: + - name: lv3 + size: "10%" + - name: lv4 + size: "20%" + - name: test_vg3 + disks: "{{ range(6, 10) | map('extract', unused_disks) | list }}" + volumes: + - name: lv5 + size: "30%" + - name: lv6 + size: "25%" + - name: lv7 + size: "10%" + - name: 
lv8 + size: "10%" snapshot_test_set: name: snapset1 volumes: @@ -24,55 +52,8 @@ tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk lists - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - disk_list_2: "{{ range(3, 6) | map('extract', unused_disks) | - list }}" - disk_list_3: "{{ range(6, 10) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "15%" - - name: lv2 - size: "50%" - - name: test_vg2 - disks: "{{ disk_list_2 }}" - volumes: - - name: lv3 - size: "10%" - - name: lv4 - size: "20%" - - name: test_vg3 - disks: "{{ disk_list_3 }}" - volumes: - - name: lv5 - size: "30%" - - name: lv6 - size: "25%" - - name: lv7 - size: "10%" - - name: lv8 - size: "10%" + - name: Setup + include_tasks: tasks/setup.yml - name: Run the snapshot role to create a snapshot set of LVs include_role: @@ -105,37 +86,6 @@ snapshot_lvm_set: "{{ snapshot_test_set }}" always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent - - name: lv2 - state: absent - - name: test_vg2 - disks: "{{ disk_list_2 }}" - state: absent - volumes: - - name: lv3 - state: absent - - name: lv4 - state: absent - - name: test_vg3 - disks: "{{ disk_list_3 }}" - state: absent - volumes: - - name: lv5 - state: absent - - name: lv6 - state: absent - - name: lv7 - state: absent - - name: lv8 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_set_revert_no_snapshots_fail.yml b/tests/tests_set_revert_no_snapshots_fail.yml index c920fd8..40ea366 100644 --- a/tests/tests_set_revert_no_snapshots_fail.yml +++ b/tests/tests_set_revert_no_snapshots_fail.yml @@ -1,34 +1,20 @@ --- - name: Verify snapshot action fails if no space is available hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "50%" tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk list - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "50%" + - name: Setup + include_tasks: tasks/setup.yml - name: Test failure of reverting snapshot that doesn't exist include_tasks: verify-role-failed.yml @@ -45,17 +31,6 @@ lv: lv1 percent_space_required: 50 always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: 
absent - volumes: - - name: lv1 - state: absent - - name: lv2 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_set_snapshot_invalid_param.yml b/tests/tests_set_snapshot_invalid_param.yml index 3bdbedf..281e545 100644 --- a/tests/tests_set_snapshot_invalid_param.yml +++ b/tests/tests_set_snapshot_invalid_param.yml @@ -1,34 +1,20 @@ --- - name: Verify snapshot action fails if percent_space_required param negative hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "100%" tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk list - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "100%" + - name: Setup + include_tasks: tasks/setup.yml - name: Test failure of creating snapshot that has negative space include_tasks: verify-role-failed.yml @@ -48,17 +34,6 @@ lv: lv1 percent_space_required: -20 always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent - - name: lv2 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_set_snapshot_missing_param.yml b/tests/tests_set_snapshot_missing_param.yml index f3aca10..c421a18 100644 --- a/tests/tests_set_snapshot_missing_param.yml +++ b/tests/tests_set_snapshot_missing_param.yml @@ -1,34 +1,20 @@ --- - name: Verify snapshot action fails if missing parameter hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "100%" tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk list - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "100%" + - name: Setup + include_tasks: tasks/setup.yml - name: Test failure of creating snapshot that is missing parameter include_tasks: verify-role-failed.yml @@ -47,17 +33,6 @@ vg: test_vg1 lv: lv1 always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent - - name: lv2 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git 
a/tests/tests_set_snapshot_no_space_fail.yml b/tests/tests_set_snapshot_no_space_fail.yml index 48d678c..36c05ea 100644 --- a/tests/tests_set_snapshot_no_space_fail.yml +++ b/tests/tests_set_snapshot_no_space_fail.yml @@ -1,34 +1,20 @@ --- - name: Verify snapshot action fails if no space is available hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "100%" tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk list - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "100%" + - name: Setup + include_tasks: tasks/setup.yml - name: Test failure of creating snapshot that is part of a set include_tasks: verify-role-failed.yml @@ -46,17 +32,6 @@ lv: lv1 percent_space_required: 50 always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent - - name: lv2 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_single_lv.yml b/tests/tests_single_lv.yml index d140de2..10ff6f7 100644 --- a/tests/tests_single_lv.yml +++ b/tests/tests_single_lv.yml @@ -1,58 +1,40 @@ --- - name: Single LV snapshot test hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "15%" + - name: lv2 + size: "50%" + - name: test_vg2 + disks: "{{ range(3, 6) | map('extract', unused_disks) | list }}" + volumes: + - name: lv3 + size: "10%" + - name: lv4 + size: "20%" + - name: test_vg3 + disks: "{{ range(6, 10) | map('extract', unused_disks) | list }}" + volumes: + - name: lv5 + size: "30%" + - name: lv6 + size: "25%" + - name: lv7 + size: "10%" + - name: lv8 + size: "10%" tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk lists - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - disk_list_2: "{{ range(3, 6) | map('extract', unused_disks) | - list }}" - disk_list_3: "{{ range(6, 10) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "15%" - - name: lv2 - size: "50%" - - name: test_vg2 - disks: "{{ disk_list_2 }}" - volumes: - - name: lv3 - size: "10%" - - name: lv4 - size: "20%" - - name: test_vg3 - disks: "{{ disk_list_3 }}" - volumes: - - name: lv5 - size: "30%" - - name: lv6 - size: "25%" - - name: lv7 - size: "10%" - - name: lv8 - size: "10%" + 
- name: Setup + include_tasks: tasks/setup.yml - name: Run the snapshot role to create snapshot of single LV include_role: @@ -94,37 +76,6 @@ snapshot_lvm_lv: lv1 snapshot_lvm_action: remove always: - - name: Remove up storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent - - name: lv2 - state: absent - - name: test_vg2 - disks: "{{ disk_list_2 }}" - state: absent - volumes: - - name: lv3 - state: absent - - name: lv4 - state: absent - - name: test_vg3 - disks: "{{ disk_list_3 }}" - state: absent - volumes: - - name: lv5 - state: absent - - name: lv6 - state: absent - - name: lv7 - state: absent - - name: lv8 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_single_vg.yml b/tests/tests_single_vg.yml index 245188c..7369d51 100644 --- a/tests/tests_single_vg.yml +++ b/tests/tests_single_vg.yml @@ -1,58 +1,40 @@ --- - name: Single VG snapshot test hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "15%" + - name: lv2 + size: "50%" + - name: test_vg2 + disks: "{{ range(3, 6) | map('extract', unused_disks) | list }}" + volumes: + - name: lv3 + size: "10%" + - name: lv4 + size: "20%" + - name: test_vg3 + disks: "{{ range(6, 10) | map('extract', unused_disks) | list }}" + volumes: + - name: lv5 + size: "30%" + - name: lv6 + size: "25%" + - name: lv7 + size: "10%" + - name: lv8 + size: "10%" tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk lists - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - disk_list_2: "{{ range(3, 6) | map('extract', unused_disks) | - list }}" - disk_list_3: "{{ range(6, 10) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "15%" - - name: lv2 - size: "50%" - - name: test_vg2 - disks: "{{ disk_list_2 }}" - volumes: - - name: lv3 - size: "10%" - - name: lv4 - size: "20%" - - name: test_vg3 - disks: "{{ disk_list_3 }}" - volumes: - - name: lv5 - size: "30%" - - name: lv6 - size: "25%" - - name: lv7 - size: "10%" - - name: lv8 - size: "10%" + - name: Setup + include_tasks: tasks/setup.yml - name: Run the snapshot role to create snapshot of single LV include_role: @@ -90,37 +72,6 @@ snapshot_lvm_vg: test_vg1 snapshot_lvm_action: remove always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent - - name: lv2 - state: absent - - name: test_vg2 - disks: "{{ disk_list_2 }}" - state: absent - volumes: - - name: lv3 - state: absent - - name: lv4 - state: absent - - name: test_vg3 - disks: "{{ disk_list_3 }}" - state: absent - volumes: - - name: lv5 - state: absent - - name: lv6 - state: absent - - name: lv7 - state: absent - - 
name: lv8 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_umount_verify.yml b/tests/tests_umount_verify.yml index dbaddb9..f2083ae 100644 --- a/tests/tests_umount_verify.yml +++ b/tests/tests_umount_verify.yml @@ -1,66 +1,48 @@ --- - name: Basic umount verify snapshot test hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: + - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "15%" + fs_type: xfs + - name: lv2 + size: "50%" + fs_type: xfs + - name: test_vg2 + disks: "{{ range(3, 6) | map('extract', unused_disks) | list }}" + volumes: + - name: lv3 + size: "10%" + fs_type: xfs + - name: lv4 + size: "20%" + fs_type: xfs + - name: test_vg3 + disks: "{{ range(6, 10) | map('extract', unused_disks) | list }}" + volumes: + - name: lv5 + size: "30%" + fs_type: xfs + - name: lv6 + size: "25%" + fs_type: xfs + - name: lv7 + size: "10%" + fs_type: xfs + - name: lv8 + size: "10%" + fs_type: xfs tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk lists - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - disk_list_2: "{{ range(3, 6) | map('extract', unused_disks) | - list }}" - disk_list_3: "{{ range(6, 10) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "15%" - fs_type: xfs - - name: lv2 - size: "50%" - fs_type: xfs - - name: test_vg2 - disks: "{{ disk_list_2 }}" - volumes: - - name: lv3 - size: "10%" - fs_type: xfs - - name: lv4 - size: "20%" - fs_type: xfs - - name: test_vg3 - disks: "{{ disk_list_3 }}" - volumes: - - name: lv5 - size: "30%" - fs_type: xfs - - name: lv6 - size: "25%" - fs_type: xfs - - name: lv7 - size: "10%" - fs_type: xfs - - name: lv8 - size: "10%" - fs_type: xfs + - name: Setup + include_tasks: tasks/setup.yml - name: Run the snapshot role to create snapshot LVs include_role: @@ -71,10 +53,6 @@ snapshot_lvm_snapset_name: snapset1 snapshot_lvm_action: snapshot - - name: Set mountpoint root directory - set_fact: - __mnt: "{{ __snapshot_is_ostree | ternary('/var/mnt', '/mnt') }}" - - name: Verify the snapshot LVs are created include_role: name: linux-system-roles.snapshot @@ -92,7 +70,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv1 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv1_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv1_mp' }}" snapshot_lvm_mountpoint_create: true - name: Mount the snapshot for lv2 @@ -103,7 +81,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv2 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv2_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv2_mp' }}" snapshot_lvm_mountpoint_create: true - name: Mount the snapshot for lv7 @@ -114,7 +92,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg3 snapshot_lvm_lv: lv7 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv7_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv7_mp' }}" snapshot_lvm_mountpoint_create: true - name: Mount the origin for lv6 @@ -125,7 +103,7 @@ snapshot_lvm_action: mount 
snapshot_lvm_vg: test_vg3 snapshot_lvm_lv: lv6 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv6_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv6_mp' }}" snapshot_lvm_mountpoint_create: true snapshot_lvm_mount_origin: true @@ -137,7 +115,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv1 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv1_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv1_mp' }}" snapshot_lvm_verify_only: true - name: Verify snapshot is mounted for lv2 @@ -148,7 +126,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv2 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv2_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv2_mp' }}" snapshot_lvm_verify_only: true - name: Verify snapshot is mounted for lv7 @@ -159,7 +137,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg3 snapshot_lvm_lv: lv7 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv7_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv7_mp' }}" snapshot_lvm_verify_only: true - name: Verify origin is mounted lv6 @@ -170,7 +148,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg3 snapshot_lvm_lv: lv6 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv6_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv6_mp' }}" snapshot_lvm_mountpoint_create: true snapshot_lvm_mount_origin: true snapshot_lvm_verify_only: true @@ -183,7 +161,7 @@ snapshot_lvm_action: umount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv1 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv1_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv1_mp' }}" - name: Umount the snapshot for lv2 include_role: @@ -193,7 +171,7 @@ snapshot_lvm_action: umount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv2 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv2_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv2_mp' }}" - name: Umount the snapshot for lv7 include_role: @@ -203,14 +181,14 @@ snapshot_lvm_action: umount snapshot_lvm_vg: test_vg3 snapshot_lvm_lv: lv7 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv7_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv7_mp' }}" - name: Umount the origin for lv6 include_role: name: linux-system-roles.snapshot vars: snapshot_lvm_action: umount - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv6_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv6_mp' }}" - name: Run the snapshot role remove the snapshot LVs include_role: @@ -227,37 +205,6 @@ snapshot_lvm_verify_only: true snapshot_lvm_action: remove always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent - - name: lv2 - state: absent - - name: test_vg2 - disks: "{{ disk_list_2 }}" - state: absent - volumes: - - name: lv3 - state: absent - - name: lv4 - state: absent - - name: test_vg3 - disks: "{{ disk_list_3 }}" - state: absent - volumes: - - name: lv5 - state: absent - - name: lv6 - state: absent - - name: lv7 - state: absent - - name: lv8 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/tests_umount_verify_fail.yml b/tests/tests_umount_verify_fail.yml index 9e02d0c..c911560 100644 --- a/tests/tests_umount_verify_fail.yml +++ b/tests/tests_umount_verify_fail.yml @@ -1,35 +1,21 @@ --- - name: Verify umount action when fs still mounted hosts: all + vars: + test_disk_min_size: "1g" + test_disk_count: 10 + test_storage_pools: 
+ - name: test_vg1 + disks: "{{ range(0, 3) | map('extract', unused_disks) | list }}" + volumes: + - name: lv1 + size: "50%" + fs_type: xfs tasks: - name: Run tests block: - - name: Run the storage role to create test LVs - include_role: - name: fedora.linux_system_roles.storage - - - name: Get unused disks - include_tasks: get_unused_disk.yml - vars: - min_size: "1g" - min_return: 10 - - - name: Set disk list - set_fact: - disk_list_1: "{{ range(0, 3) | map('extract', unused_disks) | - list }}" - - - name: Create LVM logical volumes under volume groups - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - volumes: - - name: lv1 - size: "50%" - fs_type: xfs + - name: Setup + include_tasks: tasks/setup.yml - name: Create snapshot for LV include_role: @@ -40,16 +26,6 @@ snapshot_lvm_snapset_name: snapset1 snapshot_lvm_action: snapshot - - name: Check if system is ostree - stat: - path: /run/ostree-booted - register: __ostree_booted_stat - - - name: Set mount point - set_fact: - __mnt: "{{ __ostree_booted_stat.stat.exists | - ternary('/var/mnt', '/mnt') }}" - - name: Mount the snapshot for LV include_role: name: linux-system-roles.snapshot @@ -58,7 +34,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv1 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv1_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv1_mp' }}" snapshot_lvm_mountpoint_create: true - name: Verify snapshot is mounted for lv1 @@ -69,7 +45,7 @@ snapshot_lvm_action: mount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv1 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv1_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv1_mp' }}" snapshot_lvm_verify_only: true - name: Test failure of verifying umount when fs mounted @@ -82,7 +58,7 @@ snapshot_lvm_action: umount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv1 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv1_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv1_mp' }}" snapshot_lvm_verify_only: true - name: Umount the snapshot for lv1 @@ -93,7 +69,7 @@ snapshot_lvm_action: umount snapshot_lvm_vg: test_vg1 snapshot_lvm_lv: lv1 - snapshot_lvm_mountpoint: "{{ __mnt ~ '/lv1_mp' }}" + snapshot_lvm_mountpoint: "{{ test_mnt_parent ~ '/lv1_mp' }}" - name: Remove the snapshot LVs include_role: @@ -111,15 +87,6 @@ snapshot_lvm_action: remove always: - - name: Remove storage volumes - include_role: - name: fedora.linux_system_roles.storage - vars: - storage_safe_mode: false - storage_pools: - - name: test_vg1 - disks: "{{ disk_list_1 }}" - state: absent - volumes: - - name: lv1 - state: absent + - name: Cleanup + include_tasks: tasks/cleanup.yml + tags: tests::cleanup diff --git a/tests/verify-role-failed.yml b/tests/verify-role-failed.yml index 67ec4d4..6048297 100644 --- a/tests/verify-role-failed.yml +++ b/tests/verify-role-failed.yml @@ -1,11 +1,11 @@ --- - name: Verify role fails when expected block: - - name: Show __snapshot_failed_params debug: var: __snapshot_failed_params verbosity: 1 + - name: Verify role returns error include_role: name: linux-system-roles.snapshot @@ -49,6 +49,7 @@ snapshot_lvm_mount_options: "{{ __snapshot_failed_params.get('snapshot_lvm_mount_options') }}" + - name: Unreachable task fail: msg: UNREACH