# STDOUT: ---v---v---v---v---v--- ansible-playbook 2.9.27 config file = /etc/ansible/ansible.cfg configured module search path = ['/home/jenkins/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules'] ansible python module location = /opt/ansible-2.9/lib/python3.9/site-packages/ansible executable location = /opt/ansible-2.9/bin/ansible-playbook python version = 3.9.18 (main, Sep 7 2023, 00:00:00) [GCC 11.4.1 20230605 (Red Hat 11.4.1-2)] Using /etc/ansible/ansible.cfg as config file Skipping callback 'actionable', as we already have a stdout callback. Skipping callback 'counter_enabled', as we already have a stdout callback. Skipping callback 'debug', as we already have a stdout callback. Skipping callback 'dense', as we already have a stdout callback. Skipping callback 'dense', as we already have a stdout callback. Skipping callback 'full_skip', as we already have a stdout callback. Skipping callback 'json', as we already have a stdout callback. Skipping callback 'minimal', as we already have a stdout callback. Skipping callback 'null', as we already have a stdout callback. Skipping callback 'oneline', as we already have a stdout callback. Skipping callback 'selective', as we already have a stdout callback. Skipping callback 'skippy', as we already have a stdout callback. Skipping callback 'stderr', as we already have a stdout callback. Skipping callback 'unixy', as we already have a stdout callback. Skipping callback 'yaml', as we already have a stdout callback. PLAYBOOK: tests_set_extend.yml ************************************************* 1 plays in /WORKDIR/git-module_workd5cejf_z/tests/tests_set_extend.yml PLAY [Revert snapshots of logical volumes across different volume groups] ****** TASK [Gathering Facts] ********************************************************* task path: /WORKDIR/git-module_workd5cejf_z/tests/tests_set_extend.yml:2 Saturday 13 July 2024 22:11:07 +0000 (0:00:00.012) 0:00:00.012 ********* ok: [sut] META: ran handlers TASK [Setup] ******************************************************************* task path: /WORKDIR/git-module_workd5cejf_z/tests/tests_set_extend.yml:74 Saturday 13 July 2024 22:11:07 +0000 (0:00:00.792) 0:00:00.804 ********* included: /WORKDIR/git-module_workd5cejf_z/tests/tasks/setup.yml for sut TASK [Check if system is ostree] *********************************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/tasks/setup.yml:10 Saturday 13 July 2024 22:11:07 +0000 (0:00:00.040) 0:00:00.844 ********* ok: [sut] => { "changed": false, "stat": { "exists": false } } TASK [Set mount parent] ******************************************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/tasks/setup.yml:15 Saturday 13 July 2024 22:11:08 +0000 (0:00:00.281) 0:00:01.126 ********* ok: [sut] => { "ansible_facts": { "test_mnt_parent": "/mnt" }, "changed": false } TASK [Run the storage role install base packages] ****************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/tasks/setup.yml:20 Saturday 13 July 2024 22:11:08 +0000 (0:00:00.020) 0:00:01.146 ********* TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Saturday 13 July 2024 22:11:08 +0000 (0:00:00.016) 0:00:01.162 ********* included: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for sut TASK 
[fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Saturday 13 July 2024 22:11:08 +0000 (0:00:00.050) 0:00:01.213 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Saturday 13 July 2024 22:11:08 +0000 (0:00:00.022) 0:00:01.235 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs", "stratisd", "stratis-cli" ] }, "ansible_included_var_files": [ "/WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs", "stratisd", "stratis-cli" ] }, "ansible_included_var_files": [ "/WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Saturday 13 July 2024 22:11:08 +0000 (0:00:00.041) 0:00:01.277 ********* ok: [sut] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Saturday 13 July 2024 22:11:08 +0000 (0:00:00.177) 0:00:01.455 ********* ok: [sut] => { "ansible_facts": { "__storage_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Saturday 13 July 2024 22:11:08 +0000 (0:00:00.021) 0:00:01.477 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Saturday 13 July 2024 22:11:08 +0000 (0:00:00.010) 0:00:01.487 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the 
appropriate provider tasks] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Saturday 13 July 2024 22:11:08 +0000 (0:00:00.009) 0:00:01.497 ********* included: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for sut TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Saturday 13 July 2024 22:11:08 +0000 (0:00:00.038) 0:00:01.535 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Saturday 13 July 2024 22:11:10 +0000 (0:00:02.423) 0:00:03.959 ********* ok: [sut] => { "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined" } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Saturday 13 July 2024 22:11:11 +0000 (0:00:00.019) 0:00:03.979 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Saturday 13 July 2024 22:11:11 +0000 (0:00:00.019) 0:00:03.998 ********* ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31 Saturday 13 July 2024 22:11:11 +0000 (0:00:00.427) 0:00:04.426 ********* included: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for sut TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Saturday 13 July 2024 22:11:11 +0000 (0:00:00.035) 0:00:04.462 ********* TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Saturday 13 July 2024 22:11:11 +0000 (0:00:00.011) 0:00:04.474 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************ task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19 Saturday 13 July 2024 22:11:11 +0000 (0:00:00.010) 0:00:04.484 ********* TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: 
/WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Saturday 13 July 2024 22:11:11 +0000 (0:00:00.009) 0:00:04.494 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 Saturday 13 July 2024 22:11:13 +0000 (0:00:02.368) 0:00:06.863 ********* ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", 
"status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "running", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": 
"systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@8:0.service": { "name": "lvm2-pvscan@8:0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:112.service": { "name": "lvm2-pvscan@8:112.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:128.service": { "name": "lvm2-pvscan@8:128.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:144.service": { "name": "lvm2-pvscan@8:144.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:16.service": { "name": "lvm2-pvscan@8:16.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:32.service": { "name": "lvm2-pvscan@8:32.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:48.service": { "name": "lvm2-pvscan@8:48.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:64.service": { "name": "lvm2-pvscan@8:64.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:80.service": { "name": "lvm2-pvscan@8:80.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:96.service": { "name": "lvm2-pvscan@8:96.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", 
"state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": 
"indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" }, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": 
"systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" }, "vdo-start-by-dev@.service": { "name": "vdo-start-by-dev@.service", "source": "systemd", "state": "unknown", "status": "static" }, "vdo.service": { "name": "vdo.service", "source": "systemd", "state": "stopped", "status": "enabled" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57 Saturday 13 July 2024 22:11:15 +0000 (0:00:01.506) 0:00:08.369 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63 Saturday 13 July 2024 22:11:15 +0000 (0:00:00.019) 0:00:08.388 ********* TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 Saturday 13 July 2024 22:11:15 +0000 (0:00:00.010) 0:00:08.399 ********* ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some 
platforms] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83 Saturday 13 July 2024 22:11:15 +0000 (0:00:00.332) 0:00:08.731 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95 Saturday 13 July 2024 22:11:15 +0000 (0:00:00.013) 0:00:08.745 ********* TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:101 Saturday 13 July 2024 22:11:15 +0000 (0:00:00.011) 0:00:08.756 ********* ok: [sut] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Saturday 13 July 2024 22:11:15 +0000 (0:00:00.014) 0:00:08.771 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:114 Saturday 13 July 2024 22:11:15 +0000 (0:00:00.013) 0:00:08.785 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Saturday 13 July 2024 22:11:15 +0000 (0:00:00.013) 0:00:08.799 ********* TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:141 Saturday 13 July 2024 22:11:15 +0000 (0:00:00.011) 0:00:08.810 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:146 Saturday 13 July 2024 22:11:15 +0000 (0:00:00.011) 0:00:08.821 ********* TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:157 Saturday 13 July 2024 22:11:15 +0000 (0:00:00.010) 0:00:08.832 ********* TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:169 Saturday 13 July 2024 22:11:15 +0000 (0:00:00.011) 0:00:08.844 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Saturday 13 July 2024 22:11:15 +0000 (0:00:00.024) 0:00:08.868 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1720907881.200353, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1716968941.893, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 135, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1716968586.525, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "1157759751", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:182 Saturday 13 July 2024 22:11:16 +0000 (0:00:00.180) 0:00:09.048 ********* TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:204 Saturday 13 July 2024 22:11:16 +0000 (0:00:00.011) 0:00:09.059 ********* ok: [sut] TASK [Get unused disks] ******************************************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/tasks/setup.yml:26 Saturday 13 July 2024 22:11:16 +0000 (0:00:00.559) 0:00:09.619 ********* included: /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml for sut TASK [Check if system is ostree] *********************************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml:5 Saturday 13 July 2024 22:11:16 +0000 (0:00:00.018) 0:00:09.638 ********* ok: [sut] => { "changed": false, "stat": { "exists": false } } TASK [Set flag to indicate system is ostree] *********************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml:10 Saturday 13 July 2024 22:11:16 +0000 (0:00:00.169) 0:00:09.808 ********* ok: [sut] => { "ansible_facts": { "__snapshot_is_ostree": false }, "changed": false } TASK [Ensure test packages] **************************************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml:14 Saturday 13 July 2024 22:11:16 +0000 (0:00:00.014) 0:00:09.822 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Find unused disks in the system] ***************************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml:28 Saturday 13 July 2024 22:11:19 +0000 (0:00:02.369) 0:00:12.192 ********* ok: [sut] => { "changed": false, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ], "info": [] } TASK [Set unused_disks if necessary] ******************************************* task path: /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml:36 Saturday 13 July 2024 22:11:19 +0000 (0:00:00.247) 0:00:12.440 
********* ok: [sut] => { "ansible_facts": { "unused_disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ] }, "changed": false } TASK [Print unused disks] ****************************************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml:41 Saturday 13 July 2024 22:11:19 +0000 (0:00:00.015) 0:00:12.456 ********* ok: [sut] => { "unused_disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ] } TASK [Print info from find_unused_disk] **************************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml:49 Saturday 13 July 2024 22:11:19 +0000 (0:00:00.013) 0:00:12.469 ********* skipping: [sut] => {} TASK [Show disk information] *************************************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml:54 Saturday 13 July 2024 22:11:19 +0000 (0:00:00.012) 0:00:12.482 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Exit playbook when there's not enough unused disks in the system] ******** task path: /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml:63 Saturday 13 July 2024 22:11:19 +0000 (0:00:00.012) 0:00:12.495 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Create LVM logical volumes under volume groups] ************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/tasks/setup.yml:32 Saturday 13 July 2024 22:11:19 +0000 (0:00:00.013) 0:00:12.508 ********* TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Saturday 13 July 2024 22:11:19 +0000 (0:00:00.028) 0:00:12.537 ********* included: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for sut TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Saturday 13 July 2024 22:11:19 +0000 (0:00:00.018) 0:00:12.555 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Saturday 13 July 2024 22:11:19 +0000 (0:00:00.013) 0:00:12.569 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs", "stratisd", "stratis-cli" ] }, "ansible_included_var_files": [ "/WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [sut] => (item=CentOS_8.yml) => { 
"ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs", "stratisd", "stratis-cli" ] }, "ansible_included_var_files": [ "/WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Saturday 13 July 2024 22:11:19 +0000 (0:00:00.030) 0:00:12.600 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Saturday 13 July 2024 22:11:19 +0000 (0:00:00.011) 0:00:12.611 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Saturday 13 July 2024 22:11:19 +0000 (0:00:00.012) 0:00:12.623 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Saturday 13 July 2024 22:11:19 +0000 (0:00:00.010) 0:00:12.634 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Saturday 13 July 2024 22:11:19 +0000 (0:00:00.011) 0:00:12.646 ********* included: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for sut TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Saturday 13 July 2024 22:11:19 +0000 (0:00:00.030) 0:00:12.676 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Saturday 13 July 2024 22:11:22 +0000 (0:00:02.353) 0:00:15.029 ********* ok: [sut] => { "storage_pools": [ { "disks": [ "sda", "sdb", "sdc" ], "name": "test_vg1", "volumes": [ { "name": "lv1", "size": "15%" }, { "name": "lv2", "size": "50%" } ] }, { "disks": [ "sdd", "sde", "sdf" ], "name": "test_vg2", "volumes": [ { "name": "lv3", "size": "10%" }, { "name": "lv4", "size": "20%" } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "name": "test_vg3", "volumes": [ { "name": "lv5", "size": "30%" }, { "name": "lv6", "size": 
"25%" }, { "name": "lv7", "size": "10%" }, { "name": "lv8", "size": "10%" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Saturday 13 July 2024 22:11:22 +0000 (0:00:00.019) 0:00:15.049 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Saturday 13 July 2024 22:11:22 +0000 (0:00:00.012) 0:00:15.062 ********* ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "lvm2" ], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31 Saturday 13 July 2024 22:11:25 +0000 (0:00:03.847) 0:00:18.910 ********* included: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for sut TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Saturday 13 July 2024 22:11:25 +0000 (0:00:00.022) 0:00:18.932 ********* TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Saturday 13 July 2024 22:11:25 +0000 (0:00:00.011) 0:00:18.943 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************ task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19 Saturday 13 July 2024 22:11:25 +0000 (0:00:00.011) 0:00:18.955 ********* TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Saturday 13 July 2024 22:11:25 +0000 (0:00:00.011) 0:00:18.966 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 Saturday 13 July 2024 22:11:28 +0000 (0:00:02.367) 0:00:21.334 ********* ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", 
"source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "running", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", 
"source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@8:0.service": { "name": "lvm2-pvscan@8:0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:112.service": { "name": "lvm2-pvscan@8:112.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:128.service": { "name": "lvm2-pvscan@8:128.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:144.service": { "name": "lvm2-pvscan@8:144.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:16.service": { "name": "lvm2-pvscan@8:16.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:32.service": { "name": "lvm2-pvscan@8:32.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:48.service": { "name": "lvm2-pvscan@8:48.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:64.service": { "name": "lvm2-pvscan@8:64.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:80.service": { "name": "lvm2-pvscan@8:80.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:96.service": { "name": "lvm2-pvscan@8:96.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", 
"source": "systemd", "state": "stopped", "status": "enabled" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": 
"systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" }, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", 
"state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" }, "vdo-start-by-dev@.service": { "name": "vdo-start-by-dev@.service", "source": "systemd", "state": "unknown", "status": "static" }, "vdo.service": { "name": "vdo.service", "source": "systemd", "state": "stopped", "status": "enabled" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57 Saturday 13 July 2024 22:11:29 +0000 (0:00:01.469) 0:00:22.804 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63 Saturday 13 July 2024 22:11:29 +0000 (0:00:00.019) 0:00:22.823 ********* TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 Saturday 13 July 2024 22:11:29 +0000 (0:00:00.011) 0:00:22.835 ********* changed: [sut] => { "actions": [ { "action": "create format", "device": "/dev/sdj", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdi", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdh", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdg", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/test_vg3", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv8", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv8", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv7", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv7", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv6", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv6", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv5", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv5", "fs_type": "xfs" }, { "action": "create format", "device": "/dev/sdf", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sde", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdd", "fs_type": "lvmpv" 
}, { "action": "create device", "device": "/dev/test_vg2", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/test_vg2-lv4", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg2-lv4", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg2-lv3", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg2-lv3", "fs_type": "xfs" }, { "action": "create format", "device": "/dev/sdc", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdb", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/test_vg1", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/test_vg1-lv2", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg1-lv2", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg1-lv1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg1-lv1", "fs_type": "xfs" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sdk", "/dev/sdl", "/dev/xvda1", "/dev/mapper/test_vg1-lv1", "/dev/mapper/test_vg1-lv2", "/dev/mapper/test_vg2-lv3", "/dev/mapper/test_vg2-lv4", "/dev/mapper/test_vg3-lv5", "/dev/mapper/test_vg3-lv6", "/dev/mapper/test_vg3-lv7", "/dev/mapper/test_vg3-lv8" ], "mounts": [], "packages": [ "lvm2", "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg1-lv1", "_kernel_device": "/dev/dm-7", "_mount_id": "/dev/mapper/test_vg1-lv1", "_raw_device": "/dev/mapper/test_vg1-lv1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "15%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg1-lv2", "_kernel_device": "/dev/dm-6", "_mount_id": "/dev/mapper/test_vg1-lv2", "_raw_device": "/dev/mapper/test_vg1-lv2", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, 
"fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "50%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg2", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg2-lv3", "_kernel_device": "/dev/dm-5", "_mount_id": "/dev/mapper/test_vg2-lv3", "_raw_device": "/dev/mapper/test_vg2-lv3", "_raw_kernel_device": "/dev/dm-5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv3", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg2-lv4", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/test_vg2-lv4", "_raw_device": "/dev/mapper/test_vg2-lv4", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv4", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "20%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, 
"encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg3", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg3-lv5", "_kernel_device": "/dev/dm-3", "_mount_id": "/dev/mapper/test_vg3-lv5", "_raw_device": "/dev/mapper/test_vg3-lv5", "_raw_kernel_device": "/dev/dm-3", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv5", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "30%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv6", "_kernel_device": "/dev/dm-2", "_mount_id": "/dev/mapper/test_vg3-lv6", "_raw_device": "/dev/mapper/test_vg3-lv6", "_raw_kernel_device": "/dev/dm-2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv6", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "25%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv7", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/test_vg3-lv7", "_raw_device": "/dev/mapper/test_vg3-lv7", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv7", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { 
"_device": "/dev/mapper/test_vg3-lv8", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/test_vg3-lv8", "_raw_device": "/dev/mapper/test_vg3-lv8", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv8", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83 Saturday 13 July 2024 22:11:38 +0000 (0:00:08.696) 0:00:31.531 ********* ok: [sut] => { "changed": false, "cmd": [ "udevadm", "trigger", "--subsystem-match=block" ], "delta": "0:00:00.017664", "end": "2024-07-13 22:11:38.801569", "rc": 0, "start": "2024-07-13 22:11:38.783905" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95 Saturday 13 July 2024 22:11:38 +0000 (0:00:00.271) 0:00:31.803 ********* TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:101 Saturday 13 July 2024 22:11:38 +0000 (0:00:00.011) 0:00:31.815 ********* ok: [sut] => { "blivet_output": { "actions": [ { "action": "create format", "device": "/dev/sdj", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdi", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdh", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdg", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/test_vg3", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv8", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv8", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv7", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv7", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv6", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv6", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv5", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv5", "fs_type": "xfs" }, { "action": "create format", "device": "/dev/sdf", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sde", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdd", "fs_type": "lvmpv" }, { "action": "create 
device", "device": "/dev/test_vg2", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/test_vg2-lv4", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg2-lv4", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg2-lv3", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg2-lv3", "fs_type": "xfs" }, { "action": "create format", "device": "/dev/sdc", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdb", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/test_vg1", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/test_vg1-lv2", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg1-lv2", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg1-lv1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg1-lv1", "fs_type": "xfs" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sdk", "/dev/sdl", "/dev/xvda1", "/dev/mapper/test_vg1-lv1", "/dev/mapper/test_vg1-lv2", "/dev/mapper/test_vg2-lv3", "/dev/mapper/test_vg2-lv4", "/dev/mapper/test_vg3-lv5", "/dev/mapper/test_vg3-lv6", "/dev/mapper/test_vg3-lv7", "/dev/mapper/test_vg3-lv8" ], "mounts": [], "packages": [ "lvm2", "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg1-lv1", "_kernel_device": "/dev/dm-7", "_mount_id": "/dev/mapper/test_vg1-lv1", "_raw_device": "/dev/mapper/test_vg1-lv1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "15%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg1-lv2", "_kernel_device": "/dev/dm-6", "_mount_id": "/dev/mapper/test_vg1-lv2", "_raw_device": "/dev/mapper/test_vg1-lv2", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", 
"fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "50%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg2", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg2-lv3", "_kernel_device": "/dev/dm-5", "_mount_id": "/dev/mapper/test_vg2-lv3", "_raw_device": "/dev/mapper/test_vg2-lv3", "_raw_kernel_device": "/dev/dm-5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv3", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg2-lv4", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/test_vg2-lv4", "_raw_device": "/dev/mapper/test_vg2-lv4", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv4", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "20%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, 
"encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg3", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg3-lv5", "_kernel_device": "/dev/dm-3", "_mount_id": "/dev/mapper/test_vg3-lv5", "_raw_device": "/dev/mapper/test_vg3-lv5", "_raw_kernel_device": "/dev/dm-3", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv5", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "30%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv6", "_kernel_device": "/dev/dm-2", "_mount_id": "/dev/mapper/test_vg3-lv6", "_raw_device": "/dev/mapper/test_vg3-lv6", "_raw_kernel_device": "/dev/dm-2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv6", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "25%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv7", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/test_vg3-lv7", "_raw_device": "/dev/mapper/test_vg3-lv7", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv7", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": 
"/dev/mapper/test_vg3-lv8", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/test_vg3-lv8", "_raw_device": "/dev/mapper/test_vg3-lv8", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv8", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Saturday 13 July 2024 22:11:38 +0000 (0:00:00.020) 0:00:31.835 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg1-lv1", "_kernel_device": "/dev/dm-7", "_mount_id": "/dev/mapper/test_vg1-lv1", "_raw_device": "/dev/mapper/test_vg1-lv1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "15%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg1-lv2", "_kernel_device": "/dev/dm-6", "_mount_id": "/dev/mapper/test_vg1-lv2", "_raw_device": "/dev/mapper/test_vg1-lv2", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": 
null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "50%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg2", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg2-lv3", "_kernel_device": "/dev/dm-5", "_mount_id": "/dev/mapper/test_vg2-lv3", "_raw_device": "/dev/mapper/test_vg2-lv3", "_raw_kernel_device": "/dev/dm-5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv3", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg2-lv4", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/test_vg2-lv4", "_raw_device": "/dev/mapper/test_vg2-lv4", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv4", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "20%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, 
"encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg3", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg3-lv5", "_kernel_device": "/dev/dm-3", "_mount_id": "/dev/mapper/test_vg3-lv5", "_raw_device": "/dev/mapper/test_vg3-lv5", "_raw_kernel_device": "/dev/dm-3", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv5", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "30%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv6", "_kernel_device": "/dev/dm-2", "_mount_id": "/dev/mapper/test_vg3-lv6", "_raw_device": "/dev/mapper/test_vg3-lv6", "_raw_kernel_device": "/dev/dm-2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv6", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "25%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv7", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/test_vg3-lv7", "_raw_device": "/dev/mapper/test_vg3-lv7", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv7", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { 
"_device": "/dev/mapper/test_vg3-lv8", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/test_vg3-lv8", "_raw_device": "/dev/mapper/test_vg3-lv8", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv8", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:114 Saturday 13 July 2024 22:11:38 +0000 (0:00:00.019) 0:00:31.855 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Saturday 13 July 2024 22:11:38 +0000 (0:00:00.014) 0:00:31.869 ********* TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:141 Saturday 13 July 2024 22:11:38 +0000 (0:00:00.012) 0:00:31.881 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:146 Saturday 13 July 2024 22:11:38 +0000 (0:00:00.011) 0:00:31.893 ********* TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:157 Saturday 13 July 2024 22:11:38 +0000 (0:00:00.011) 0:00:31.904 ********* TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:169 Saturday 13 July 2024 22:11:38 +0000 (0:00:00.012) 0:00:31.916 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Saturday 13 July 2024 22:11:38 +0000 (0:00:00.012) 0:00:31.929 ********* ok: [sut] => { "changed": false, "stat": { "atime": 
1720907881.200353, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1716968941.893, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 135, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1716968586.525, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "1157759751", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:182 Saturday 13 July 2024 22:11:39 +0000 (0:00:00.178) 0:00:32.107 ********* TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:204 Saturday 13 July 2024 22:11:39 +0000 (0:00:00.011) 0:00:32.119 ********* ok: [sut] TASK [Run the snapshot role to create a snapshot set of LVs] ******************* task path: /WORKDIR/git-module_workd5cejf_z/tests/tests_set_extend.yml:77 Saturday 13 July 2024 22:11:39 +0000 (0:00:00.615) 0:00:32.735 ********* TASK [linux-system-roles.snapshot : Set platform/version specific variables] *** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:3 Saturday 13 July 2024 22:11:39 +0000 (0:00:00.036) 0:00:32.771 ********* included: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml for sut TASK [linux-system-roles.snapshot : Ensure ansible_facts used by role] ********* task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:2 Saturday 13 July 2024 22:11:39 +0000 (0:00:00.026) 0:00:32.797 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Check if system is ostree] ***************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:11 Saturday 13 July 2024 22:11:39 +0000 (0:00:00.016) 0:00:32.814 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Set flag to indicate system is ostree] ***** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:16 Saturday 13 July 2024 22:11:39 +0000 (0:00:00.012) 0:00:32.827 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Set platform/version specific variables] *** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:20 Saturday 13 July 2024 22:11:39 +0000 (0:00:00.013) 0:00:32.841 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": 
false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_packages": [ "lvm2", "util-linux" ], "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_packages": [ "lvm2", "util-linux" ], "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [linux-system-roles.snapshot : Ensure required packages are installed] **** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:6 Saturday 13 July 2024 22:11:39 +0000 (0:00:00.033) 0:00:32.874 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [linux-system-roles.snapshot : Run snapshot module snapshot] ************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:15 Saturday 13 July 2024 22:11:42 +0000 (0:00:02.376) 0:00:35.251 ********* changed: [sut] => { "changed": true, "errors": "", "message": "", "return_code": 0 } TASK [linux-system-roles.snapshot : Print out response] ************************ task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:43 Saturday 13 July 2024 22:11:44 +0000 (0:00:01.847) 0:00:37.099 ********* ok: [sut] => { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0, "warnings": [ "The value 15 (type int) in a string field was converted to '15' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 15 (type int) in a string field was converted to '15' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 15 (type int) in a string field was converted to '15' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 15 (type int) in a string field was converted to '15' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change." ] } } TASK [linux-system-roles.snapshot : Set result] ******************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:48 Saturday 13 July 2024 22:11:44 +0000 (0:00:00.015) 0:00:37.114 ********* ok: [sut] => { "ansible_facts": { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0, "warnings": [ "The value 15 (type int) in a string field was converted to '15' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 15 (type int) in a string field was converted to '15' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 15 (type int) in a string field was converted to '15' (type string). 
If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 15 (type int) in a string field was converted to '15' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change." ] } }, "changed": false } TASK [linux-system-roles.snapshot : Set snapshot_facts to the JSON results] **** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:52 Saturday 13 July 2024 22:11:44 +0000 (0:00:00.014) 0:00:37.128 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Show errors] ******************************* task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:57 Saturday 13 July 2024 22:11:44 +0000 (0:00:00.013) 0:00:37.142 ********* skipping: [sut] => {} TASK [Verify the set of snapshots for the LVs] ********************************* task path: /WORKDIR/git-module_workd5cejf_z/tests/tests_set_extend.yml:84 Saturday 13 July 2024 22:11:44 +0000 (0:00:00.016) 0:00:37.158 ********* TASK [linux-system-roles.snapshot : Set platform/version specific variables] *** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:3 Saturday 13 July 2024 22:11:44 +0000 (0:00:00.026) 0:00:37.184 ********* included: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml for sut TASK [linux-system-roles.snapshot : Ensure ansible_facts used by role] ********* task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:2 Saturday 13 July 2024 22:11:44 +0000 (0:00:00.018) 0:00:37.202 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Check if system is ostree] ***************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:11 Saturday 13 July 2024 22:11:44 +0000 (0:00:00.036) 0:00:37.239 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Set flag to indicate system is ostree] ***** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:16 Saturday 13 July 2024 22:11:44 +0000 (0:00:00.015) 0:00:37.254 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Set platform/version specific variables] *** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:20 Saturday 13 July 2024 22:11:44 +0000 (0:00:00.015) 0:00:37.270 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_packages": [ "lvm2", "util-linux" ], "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { 
"__snapshot_packages": [ "lvm2", "util-linux" ], "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [linux-system-roles.snapshot : Ensure required packages are installed] **** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:6 Saturday 13 July 2024 22:11:44 +0000 (0:00:00.035) 0:00:37.305 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [linux-system-roles.snapshot : Run snapshot module check] ***************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:15 Saturday 13 July 2024 22:11:46 +0000 (0:00:02.359) 0:00:39.665 ********* ok: [sut] => { "changed": false, "errors": "", "message": "", "return_code": 0 } TASK [linux-system-roles.snapshot : Print out response] ************************ task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:43 Saturday 13 July 2024 22:11:47 +0000 (0:00:00.678) 0:00:40.343 ********* ok: [sut] => { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0, "warnings": [ "The value 15 (type int) in a string field was converted to '15' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 15 (type int) in a string field was converted to '15' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 15 (type int) in a string field was converted to '15' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 15 (type int) in a string field was converted to '15' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change." ] } } TASK [linux-system-roles.snapshot : Set result] ******************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:48 Saturday 13 July 2024 22:11:47 +0000 (0:00:00.032) 0:00:40.376 ********* ok: [sut] => { "ansible_facts": { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0, "warnings": [ "The value 15 (type int) in a string field was converted to '15' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 15 (type int) in a string field was converted to '15' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 15 (type int) in a string field was converted to '15' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 15 (type int) in a string field was converted to '15' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change." 
] } }, "changed": false } TASK [linux-system-roles.snapshot : Set snapshot_facts to the JSON results] **** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:52 Saturday 13 July 2024 22:11:47 +0000 (0:00:00.028) 0:00:40.404 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Show errors] ******************************* task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:57 Saturday 13 July 2024 22:11:47 +0000 (0:00:00.034) 0:00:40.439 ********* skipping: [sut] => {} TASK [Extend the set] ********************************************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/tests_set_extend.yml:92 Saturday 13 July 2024 22:11:47 +0000 (0:00:00.024) 0:00:40.463 ********* TASK [linux-system-roles.snapshot : Set platform/version specific variables] *** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:3 Saturday 13 July 2024 22:11:47 +0000 (0:00:00.037) 0:00:40.501 ********* included: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml for sut TASK [linux-system-roles.snapshot : Ensure ansible_facts used by role] ********* task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:2 Saturday 13 July 2024 22:11:47 +0000 (0:00:00.028) 0:00:40.529 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Check if system is ostree] ***************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:11 Saturday 13 July 2024 22:11:47 +0000 (0:00:00.028) 0:00:40.557 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Set flag to indicate system is ostree] ***** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:16 Saturday 13 July 2024 22:11:47 +0000 (0:00:00.023) 0:00:40.581 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Set platform/version specific variables] *** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:20 Saturday 13 July 2024 22:11:47 +0000 (0:00:00.016) 0:00:40.597 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_packages": [ "lvm2", "util-linux" ], "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_packages": [ "lvm2", "util-linux" ], "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, 
"item": "CentOS_8.yml" } TASK [linux-system-roles.snapshot : Ensure required packages are installed] **** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:6 Saturday 13 July 2024 22:11:47 +0000 (0:00:00.049) 0:00:40.646 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [linux-system-roles.snapshot : Run snapshot module extend] **************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:15 Saturday 13 July 2024 22:11:50 +0000 (0:00:02.367) 0:00:43.014 ********* changed: [sut] => { "changed": true, "errors": "", "message": "", "return_code": 0 } TASK [linux-system-roles.snapshot : Print out response] ************************ task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:43 Saturday 13 July 2024 22:11:51 +0000 (0:00:01.032) 0:00:44.047 ********* ok: [sut] => { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0, "warnings": [ "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change." ] } } TASK [linux-system-roles.snapshot : Set result] ******************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:48 Saturday 13 July 2024 22:11:51 +0000 (0:00:00.014) 0:00:44.062 ********* ok: [sut] => { "ansible_facts": { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0, "warnings": [ "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change." 
] } }, "changed": false } TASK [linux-system-roles.snapshot : Set snapshot_facts to the JSON results] **** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:52 Saturday 13 July 2024 22:11:51 +0000 (0:00:00.015) 0:00:44.077 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Show errors] ******************************* task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:57 Saturday 13 July 2024 22:11:51 +0000 (0:00:00.013) 0:00:44.090 ********* skipping: [sut] => {} TASK [Assert changes for extend] *********************************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/tests_set_extend.yml:99 Saturday 13 July 2024 22:11:51 +0000 (0:00:00.016) 0:00:44.106 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the extend is done] *********************************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/tests_set_extend.yml:103 Saturday 13 July 2024 22:11:51 +0000 (0:00:00.032) 0:00:44.138 ********* TASK [linux-system-roles.snapshot : Set platform/version specific variables] *** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:3 Saturday 13 July 2024 22:11:51 +0000 (0:00:00.029) 0:00:44.167 ********* included: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml for sut TASK [linux-system-roles.snapshot : Ensure ansible_facts used by role] ********* task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:2 Saturday 13 July 2024 22:11:51 +0000 (0:00:00.018) 0:00:44.186 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Check if system is ostree] ***************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:11 Saturday 13 July 2024 22:11:51 +0000 (0:00:00.015) 0:00:44.202 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Set flag to indicate system is ostree] ***** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:16 Saturday 13 July 2024 22:11:51 +0000 (0:00:00.013) 0:00:44.215 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Set platform/version specific variables] *** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:20 Saturday 13 July 2024 22:11:51 +0000 (0:00:00.012) 0:00:44.227 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_packages": [ "lvm2", "util-linux" ], "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [sut] => (item=CentOS_8.yml) => { 
"ansible_facts": { "__snapshot_packages": [ "lvm2", "util-linux" ], "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [linux-system-roles.snapshot : Ensure required packages are installed] **** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:6 Saturday 13 July 2024 22:11:51 +0000 (0:00:00.032) 0:00:44.260 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [linux-system-roles.snapshot : Run snapshot module extend] **************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:15 Saturday 13 July 2024 22:11:53 +0000 (0:00:02.357) 0:00:46.617 ********* ok: [sut] => { "changed": false, "errors": "", "message": "", "return_code": 0 } TASK [linux-system-roles.snapshot : Print out response] ************************ task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:43 Saturday 13 July 2024 22:11:54 +0000 (0:00:00.536) 0:00:47.154 ********* ok: [sut] => { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0, "warnings": [ "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change." ] } } TASK [linux-system-roles.snapshot : Set result] ******************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:48 Saturday 13 July 2024 22:11:54 +0000 (0:00:00.015) 0:00:47.169 ********* ok: [sut] => { "ansible_facts": { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0, "warnings": [ "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change." 
] } }, "changed": false } TASK [linux-system-roles.snapshot : Set snapshot_facts to the JSON results] **** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:52 Saturday 13 July 2024 22:11:54 +0000 (0:00:00.014) 0:00:47.183 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Show errors] ******************************* task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:57 Saturday 13 July 2024 22:11:54 +0000 (0:00:00.013) 0:00:47.196 ********* skipping: [sut] => {} TASK [Extend the set again to check idempotence] ******************************* task path: /WORKDIR/git-module_workd5cejf_z/tests/tests_set_extend.yml:111 Saturday 13 July 2024 22:11:54 +0000 (0:00:00.012) 0:00:47.209 ********* TASK [linux-system-roles.snapshot : Set platform/version specific variables] *** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:3 Saturday 13 July 2024 22:11:54 +0000 (0:00:00.030) 0:00:47.239 ********* included: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml for sut TASK [linux-system-roles.snapshot : Ensure ansible_facts used by role] ********* task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:2 Saturday 13 July 2024 22:11:54 +0000 (0:00:00.018) 0:00:47.257 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Check if system is ostree] ***************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:11 Saturday 13 July 2024 22:11:54 +0000 (0:00:00.032) 0:00:47.290 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Set flag to indicate system is ostree] ***** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:16 Saturday 13 July 2024 22:11:54 +0000 (0:00:00.013) 0:00:47.304 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Set platform/version specific variables] *** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:20 Saturday 13 July 2024 22:11:54 +0000 (0:00:00.012) 0:00:47.316 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_packages": [ "lvm2", "util-linux" ], "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_packages": [ "lvm2", "util-linux" ], "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, 
"item": "CentOS_8.yml" } TASK [linux-system-roles.snapshot : Ensure required packages are installed] **** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:6 Saturday 13 July 2024 22:11:54 +0000 (0:00:00.032) 0:00:47.348 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [linux-system-roles.snapshot : Run snapshot module extend] **************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:15 Saturday 13 July 2024 22:11:56 +0000 (0:00:02.382) 0:00:49.731 ********* ok: [sut] => { "changed": false, "errors": "", "message": "", "return_code": 0 } TASK [linux-system-roles.snapshot : Print out response] ************************ task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:43 Saturday 13 July 2024 22:11:57 +0000 (0:00:00.621) 0:00:50.352 ********* ok: [sut] => { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0, "warnings": [ "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change." ] } } TASK [linux-system-roles.snapshot : Set result] ******************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:48 Saturday 13 July 2024 22:11:57 +0000 (0:00:00.013) 0:00:50.366 ********* ok: [sut] => { "ansible_facts": { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0, "warnings": [ "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change." 
] } }, "changed": false } TASK [linux-system-roles.snapshot : Set snapshot_facts to the JSON results] **** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:52 Saturday 13 July 2024 22:11:57 +0000 (0:00:00.013) 0:00:50.380 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Show errors] ******************************* task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:57 Saturday 13 July 2024 22:11:57 +0000 (0:00:00.012) 0:00:50.392 ********* skipping: [sut] => {} TASK [Assert no changes for extend] ******************************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/tests_set_extend.yml:118 Saturday 13 July 2024 22:11:57 +0000 (0:00:00.011) 0:00:50.404 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Run the snapshot role remove the set] ************************************ task path: /WORKDIR/git-module_workd5cejf_z/tests/tests_set_extend.yml:122 Saturday 13 July 2024 22:11:57 +0000 (0:00:00.013) 0:00:50.417 ********* TASK [linux-system-roles.snapshot : Set platform/version specific variables] *** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:3 Saturday 13 July 2024 22:11:57 +0000 (0:00:00.034) 0:00:50.452 ********* included: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml for sut TASK [linux-system-roles.snapshot : Ensure ansible_facts used by role] ********* task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:2 Saturday 13 July 2024 22:11:57 +0000 (0:00:00.017) 0:00:50.470 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Check if system is ostree] ***************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:11 Saturday 13 July 2024 22:11:57 +0000 (0:00:00.016) 0:00:50.486 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Set flag to indicate system is ostree] ***** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:16 Saturday 13 July 2024 22:11:57 +0000 (0:00:00.012) 0:00:50.498 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Set platform/version specific variables] *** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:20 Saturday 13 July 2024 22:11:57 +0000 (0:00:00.011) 0:00:50.510 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_packages": [ "lvm2", "util-linux" ], "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [sut] => (item=CentOS_8.yml) => { 
"ansible_facts": { "__snapshot_packages": [ "lvm2", "util-linux" ], "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [linux-system-roles.snapshot : Ensure required packages are installed] **** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:6 Saturday 13 July 2024 22:11:57 +0000 (0:00:00.032) 0:00:50.542 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [linux-system-roles.snapshot : Run snapshot module remove] **************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:15 Saturday 13 July 2024 22:11:59 +0000 (0:00:02.375) 0:00:52.918 ********* changed: [sut] => { "changed": true, "errors": "", "message": "", "return_code": 0 } TASK [linux-system-roles.snapshot : Print out response] ************************ task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:43 Saturday 13 July 2024 22:12:01 +0000 (0:00:01.276) 0:00:54.194 ********* ok: [sut] => { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0, "warnings": [ "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change." ] } } TASK [linux-system-roles.snapshot : Set result] ******************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:48 Saturday 13 July 2024 22:12:01 +0000 (0:00:00.014) 0:00:54.209 ********* ok: [sut] => { "ansible_facts": { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0, "warnings": [ "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change." 
] } }, "changed": false } TASK [linux-system-roles.snapshot : Set snapshot_facts to the JSON results] **** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:52 Saturday 13 July 2024 22:12:01 +0000 (0:00:00.014) 0:00:54.224 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Show errors] ******************************* task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:57 Saturday 13 July 2024 22:12:01 +0000 (0:00:00.013) 0:00:54.238 ********* skipping: [sut] => {} TASK [Run the snapshot role to verify the set is removed] ********************** task path: /WORKDIR/git-module_workd5cejf_z/tests/tests_set_extend.yml:129 Saturday 13 July 2024 22:12:01 +0000 (0:00:00.017) 0:00:54.256 ********* TASK [linux-system-roles.snapshot : Set platform/version specific variables] *** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:3 Saturday 13 July 2024 22:12:01 +0000 (0:00:00.077) 0:00:54.334 ********* included: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml for sut TASK [linux-system-roles.snapshot : Ensure ansible_facts used by role] ********* task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:2 Saturday 13 July 2024 22:12:01 +0000 (0:00:00.020) 0:00:54.354 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Check if system is ostree] ***************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:11 Saturday 13 July 2024 22:12:01 +0000 (0:00:00.015) 0:00:54.370 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Set flag to indicate system is ostree] ***** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:16 Saturday 13 July 2024 22:12:01 +0000 (0:00:00.012) 0:00:54.382 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Set platform/version specific variables] *** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/set_vars.yml:20 Saturday 13 July 2024 22:12:01 +0000 (0:00:00.012) 0:00:54.395 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_packages": [ "lvm2", "util-linux" ], "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { "__snapshot_packages": [ "lvm2", "util-linux" ], "__snapshot_python": "/usr/libexec/platform-python" }, "ansible_included_var_files": [ "/WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, 
"item": "CentOS_8.yml" } TASK [linux-system-roles.snapshot : Ensure required packages are installed] **** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:6 Saturday 13 July 2024 22:12:01 +0000 (0:00:00.033) 0:00:54.428 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [linux-system-roles.snapshot : Run snapshot module remove] **************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:15 Saturday 13 July 2024 22:12:03 +0000 (0:00:02.388) 0:00:56.816 ********* ok: [sut] => { "changed": false, "errors": "", "message": "", "return_code": 0 } TASK [linux-system-roles.snapshot : Print out response] ************************ task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:43 Saturday 13 July 2024 22:12:04 +0000 (0:00:00.401) 0:00:57.218 ********* ok: [sut] => { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0, "warnings": [ "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change." ] } } TASK [linux-system-roles.snapshot : Set result] ******************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:48 Saturday 13 July 2024 22:12:04 +0000 (0:00:00.013) 0:00:57.232 ********* ok: [sut] => { "ansible_facts": { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0, "warnings": [ "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change.", "The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change." 
] } }, "changed": false } TASK [linux-system-roles.snapshot : Set snapshot_facts to the JSON results] **** task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:52 Saturday 13 July 2024 22:12:04 +0000 (0:00:00.013) 0:00:57.245 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.snapshot : Show errors] ******************************* task path: /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:57 Saturday 13 July 2024 22:12:04 +0000 (0:00:00.011) 0:00:57.257 ********* skipping: [sut] => {} TASK [Cleanup] ***************************************************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/tests_set_extend.yml:138 Saturday 13 July 2024 22:12:04 +0000 (0:00:00.012) 0:00:57.270 ********* included: /WORKDIR/git-module_workd5cejf_z/tests/tasks/cleanup.yml for sut TASK [Remove storage volumes] ************************************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/tasks/cleanup.yml:7 Saturday 13 July 2024 22:12:04 +0000 (0:00:00.029) 0:00:57.300 ********* TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Saturday 13 July 2024 22:12:04 +0000 (0:00:00.021) 0:00:57.321 ********* included: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for sut TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Saturday 13 July 2024 22:12:04 +0000 (0:00:00.027) 0:00:57.349 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Saturday 13 July 2024 22:12:04 +0000 (0:00:00.018) 0:00:57.367 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs", "stratisd", "stratis-cli" ] }, "ansible_included_var_files": [ "/WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs", "stratisd", "stratis-cli" ] }, "ansible_included_var_files": [ "/WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", 
"changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Saturday 13 July 2024 22:12:04 +0000 (0:00:00.067) 0:00:57.435 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Saturday 13 July 2024 22:12:04 +0000 (0:00:00.021) 0:00:57.456 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Saturday 13 July 2024 22:12:04 +0000 (0:00:00.032) 0:00:57.488 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Saturday 13 July 2024 22:12:04 +0000 (0:00:00.027) 0:00:57.516 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Saturday 13 July 2024 22:12:04 +0000 (0:00:00.014) 0:00:57.530 ********* included: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for sut TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Saturday 13 July 2024 22:12:04 +0000 (0:00:00.038) 0:00:57.569 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Saturday 13 July 2024 22:12:06 +0000 (0:00:02.382) 0:00:59.951 ********* ok: [sut] => { "storage_pools": [ { "disks": [ "sda", "sdb", "sdc" ], "name": "test_vg1", "state": "absent", "volumes": [ { "name": "lv1", "state": "absent" }, { "name": "lv2", "state": "absent" } ] }, { "disks": [ "sdd", "sde", "sdf" ], "name": "test_vg2", "state": "absent", "volumes": [ { "name": "lv3", "state": "absent" }, { "name": "lv4", "state": "absent" } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "name": "test_vg3", "state": "absent", "volumes": [ { "name": "lv5", "state": "absent" }, { "name": "lv6", "state": "absent" }, { "name": "lv7", "state": "absent" }, { "name": "lv8", "state": "absent" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Saturday 13 July 2024 
22:12:07 +0000 (0:00:00.023) 0:00:59.975 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Saturday 13 July 2024 22:12:07 +0000 (0:00:00.015) 0:00:59.990 ********* ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31 Saturday 13 July 2024 22:12:13 +0000 (0:00:06.017) 0:01:06.008 ********* included: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for sut TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Saturday 13 July 2024 22:12:13 +0000 (0:00:00.029) 0:01:06.037 ********* TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Saturday 13 July 2024 22:12:13 +0000 (0:00:00.029) 0:01:06.067 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************ task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19 Saturday 13 July 2024 22:12:13 +0000 (0:00:00.013) 0:01:06.081 ********* TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Saturday 13 July 2024 22:12:13 +0000 (0:00:00.049) 0:01:06.130 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 Saturday 13 July 2024 22:12:15 +0000 (0:00:02.399) 0:01:08.529 ********* ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": 
"blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "running", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": 
"dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@8:0.service": { "name": "lvm2-pvscan@8:0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:112.service": { "name": "lvm2-pvscan@8:112.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:128.service": { "name": "lvm2-pvscan@8:128.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:144.service": { "name": "lvm2-pvscan@8:144.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:16.service": { "name": "lvm2-pvscan@8:16.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:32.service": { "name": "lvm2-pvscan@8:32.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:48.service": { "name": "lvm2-pvscan@8:48.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:64.service": { "name": "lvm2-pvscan@8:64.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:80.service": { "name": "lvm2-pvscan@8:80.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:96.service": { "name": "lvm2-pvscan@8:96.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { 
"name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" }, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": 
"systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": 
"tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" }, "vdo-start-by-dev@.service": { "name": "vdo-start-by-dev@.service", "source": "systemd", "state": "unknown", "status": "static" }, "vdo.service": { "name": "vdo.service", "source": "systemd", "state": "stopped", "status": "enabled" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57 Saturday 13 July 2024 22:12:16 +0000 (0:00:01.435) 0:01:09.965 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63 Saturday 13 July 2024 22:12:17 +0000 (0:00:00.025) 0:01:09.991 ********* TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 Saturday 13 July 2024 22:12:17 +0000 (0:00:00.025) 0:01:10.016 ********* changed: [sut] => { "actions": [ { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv8", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv8", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv7", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv7", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv6", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv6", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv5", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv5", "fs_type": null }, { "action": "destroy device", "device": "/dev/test_vg3", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdg", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdj", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdh", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdi", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/mapper/test_vg2-lv4", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg2-lv4", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg2-lv3", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg2-lv3", "fs_type": null }, { "action": "destroy device", "device": "/dev/test_vg2", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdf", "fs_type": "lvmpv" }, { 
"action": "destroy format", "device": "/dev/sde", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdd", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/mapper/test_vg1-lv2", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg1-lv2", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg1-lv1", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg1-lv1", "fs_type": null }, { "action": "destroy device", "device": "/dev/test_vg1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdc", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "lvmpv" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdk", "/dev/sdl", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/sdj", "/dev/xvda1" ], "mounts": [], "packages": [ "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg1-lv1", "_mount_id": "/dev/mapper/test_vg1-lv1", "_raw_device": "/dev/mapper/test_vg1-lv1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 482344960, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg1-lv2", "_mount_id": "/dev/mapper/test_vg1-lv2", "_raw_device": "/dev/mapper/test_vg1-lv2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1606418432, 
"state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg2", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg2-lv3", "_mount_id": "/dev/mapper/test_vg2-lv3", "_raw_device": "/dev/mapper/test_vg2-lv3", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv3", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 322961408, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg2-lv4", "_mount_id": "/dev/mapper/test_vg2-lv4", "_raw_device": "/dev/mapper/test_vg2-lv4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv4", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 641728512, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg3", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg3-lv5", "_mount_id": "/dev/mapper/test_vg3-lv5", "_raw_device": "/dev/mapper/test_vg3-lv5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", 
"sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv5", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1283457024, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv6", "_mount_id": "/dev/mapper/test_vg3-lv6", "_raw_device": "/dev/mapper/test_vg3-lv6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv6", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1069547520, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv7", "_mount_id": "/dev/mapper/test_vg3-lv7", "_raw_device": "/dev/mapper/test_vg3-lv7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv7", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 427819008, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv8", "_mount_id": "/dev/mapper/test_vg3-lv8", "_raw_device": "/dev/mapper/test_vg3-lv8", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", 
"mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv8", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 427819008, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83 Saturday 13 July 2024 22:12:27 +0000 (0:00:10.388) 0:01:20.405 ********* ok: [sut] => { "changed": false, "cmd": [ "udevadm", "trigger", "--subsystem-match=block" ], "delta": "0:00:00.010067", "end": "2024-07-13 22:12:27.601476", "rc": 0, "start": "2024-07-13 22:12:27.591409" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95 Saturday 13 July 2024 22:12:27 +0000 (0:00:00.198) 0:01:20.603 ********* TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:101 Saturday 13 July 2024 22:12:27 +0000 (0:00:00.012) 0:01:20.615 ********* ok: [sut] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv8", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv8", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv7", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv7", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv6", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv6", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv5", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv5", "fs_type": null }, { "action": "destroy device", "device": "/dev/test_vg3", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdg", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdj", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdh", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdi", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/mapper/test_vg2-lv4", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg2-lv4", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg2-lv3", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg2-lv3", "fs_type": null }, { "action": "destroy device", "device": "/dev/test_vg2", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdf", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sde", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdd", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/mapper/test_vg1-lv2", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg1-lv2", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg1-lv1", "fs_type": "xfs" }, { 
"action": "destroy device", "device": "/dev/mapper/test_vg1-lv1", "fs_type": null }, { "action": "destroy device", "device": "/dev/test_vg1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdc", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "lvmpv" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdk", "/dev/sdl", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/sdj", "/dev/xvda1" ], "mounts": [], "packages": [ "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg1-lv1", "_mount_id": "/dev/mapper/test_vg1-lv1", "_raw_device": "/dev/mapper/test_vg1-lv1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 482344960, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg1-lv2", "_mount_id": "/dev/mapper/test_vg1-lv2", "_raw_device": "/dev/mapper/test_vg1-lv2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1606418432, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, 
"encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg2", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg2-lv3", "_mount_id": "/dev/mapper/test_vg2-lv3", "_raw_device": "/dev/mapper/test_vg2-lv3", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv3", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 322961408, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg2-lv4", "_mount_id": "/dev/mapper/test_vg2-lv4", "_raw_device": "/dev/mapper/test_vg2-lv4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv4", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 641728512, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg3", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg3-lv5", "_mount_id": "/dev/mapper/test_vg3-lv5", "_raw_device": "/dev/mapper/test_vg3-lv5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", 
"mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv5", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1283457024, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv6", "_mount_id": "/dev/mapper/test_vg3-lv6", "_raw_device": "/dev/mapper/test_vg3-lv6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv6", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1069547520, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv7", "_mount_id": "/dev/mapper/test_vg3-lv7", "_raw_device": "/dev/mapper/test_vg3-lv7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv7", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 427819008, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv8", "_mount_id": "/dev/mapper/test_vg3-lv8", "_raw_device": "/dev/mapper/test_vg3-lv8", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv8", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 427819008, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], 
"volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Saturday 13 July 2024 22:12:27 +0000 (0:00:00.021) 0:01:20.637 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg1-lv1", "_mount_id": "/dev/mapper/test_vg1-lv1", "_raw_device": "/dev/mapper/test_vg1-lv1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 482344960, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg1-lv2", "_mount_id": "/dev/mapper/test_vg1-lv2", "_raw_device": "/dev/mapper/test_vg1-lv2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1606418432, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg2", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": 
"/dev/mapper/test_vg2-lv3", "_mount_id": "/dev/mapper/test_vg2-lv3", "_raw_device": "/dev/mapper/test_vg2-lv3", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv3", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 322961408, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg2-lv4", "_mount_id": "/dev/mapper/test_vg2-lv4", "_raw_device": "/dev/mapper/test_vg2-lv4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv4", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 641728512, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg3", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg3-lv5", "_mount_id": "/dev/mapper/test_vg3-lv5", "_raw_device": "/dev/mapper/test_vg3-lv5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv5", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1283457024, "state": 
"absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv6", "_mount_id": "/dev/mapper/test_vg3-lv6", "_raw_device": "/dev/mapper/test_vg3-lv6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv6", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1069547520, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv7", "_mount_id": "/dev/mapper/test_vg3-lv7", "_raw_device": "/dev/mapper/test_vg3-lv7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv7", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 427819008, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv8", "_mount_id": "/dev/mapper/test_vg3-lv8", "_raw_device": "/dev/mapper/test_vg3-lv8", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv8", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 427819008, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:114 Saturday 13 July 2024 
22:12:27 +0000 (0:00:00.021) 0:01:20.658 *********
ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false }

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130
Saturday 13 July 2024 22:12:27 +0000 (0:00:00.014) 0:01:20.673 *********

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:141
Saturday 13 July 2024 22:12:27 +0000 (0:00:00.013) 0:01:20.686 *********
skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:146
Saturday 13 July 2024 22:12:27 +0000 (0:00:00.013) 0:01:20.700 *********

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:157
Saturday 13 July 2024 22:12:27 +0000 (0:00:00.014) 0:01:20.714 *********

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:169
Saturday 13 July 2024 22:12:27 +0000 (0:00:00.013) 0:01:20.728 *********
skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177
Saturday 13 July 2024 22:12:27 +0000 (0:00:00.013) 0:01:20.741 *********
ok: [sut] => { "changed": false, "stat": { "atime": 1720907881.200353, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1716968941.893, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 135, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1716968586.525, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "1157759751", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } }

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:182
Saturday 13 July 2024 22:12:27 +0000 (0:00:00.184) 0:01:20.926 *********

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:204
Saturday 13 July 2024 22:12:27 +0000 (0:00:00.013) 0:01:20.939 *********
ok: [sut]

TASK [Save unused_disk_return before verify] ***********************************
task path: /WORKDIR/git-module_workd5cejf_z/tests/tasks/cleanup.yml:30
Saturday 13 July 2024 22:12:28 +0000 (0:00:00.582) 0:01:21.521 *********
ok: [sut] => { "ansible_facts": { "unused_disks_before": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ] }, "changed": false }

TASK [Verify that pools/volumes used in test are removed] **********************
task path: /WORKDIR/git-module_workd5cejf_z/tests/tasks/cleanup.yml:34
Saturday 13 July 2024 22:12:28 +0000 (0:00:00.013) 0:01:21.535 *********
included: /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml for sut

TASK [Check if system is ostree] ***********************************************
task path: /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml:5
Saturday 13 July 2024 22:12:28 +0000 (0:00:00.019) 0:01:21.554 *********
skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Set flag to indicate system is ostree] ***********************************
task path: /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml:10
Saturday 13 July 2024 22:12:28 +0000 (0:00:00.013) 0:01:21.568 *********
skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Ensure test packages] ****************************************************
task path: /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml:14
Saturday 13 July 2024 22:12:28 +0000 (0:00:00.019) 0:01:21.587 *********
ok: [sut] => { "changed": false, "rc": 0, "results": [] }
MSG: Nothing to do

TASK [Find unused disks in the system] *****************************************
task path: /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml:28
Saturday 13 July 2024 22:12:30 +0000 (0:00:02.380) 0:01:23.967 *********
ok: [sut] => { "changed": false, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ], "info": [] }

TASK [Set unused_disks if necessary] *******************************************
task path: /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml:36
Saturday 13 July 2024 22:12:31 +0000 (0:00:00.184) 0:01:24.152 *********
ok: [sut] => { "ansible_facts": { "unused_disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ] }, "changed": false }

TASK [Print unused disks] ******************************************************
task path: /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml:41
Saturday 13 July 2024 22:12:31 +0000 (0:00:00.015) 0:01:24.167 *********
ok: [sut] => { "unused_disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ] }

TASK [Print info from find_unused_disk] ****************************************
task path: /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml:49
Saturday 13 July 2024 22:12:31 +0000 (0:00:00.013) 0:01:24.181 *********
skipping: [sut] => {}

TASK [Show disk information] ***************************************************
task path: /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml:54
Saturday 13 July 2024 22:12:31 +0000 (0:00:00.014) 0:01:24.195 *********
skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Exit playbook when there's not enough unused disks in the system] ********
task path: /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml:63
Saturday 13 July 2024 22:12:31 +0000 (0:00:00.018) 0:01:24.213 *********
skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" }
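The cleanup steps above save the pre-test disk list as unused_disks_before, rescan the system, and set unused_disks; both facts contain the same ten disks (sda through sdj), so the "Debug why list of unused disks has changed" task that follows is skipped. A minimal sketch of that kind of comparison task, reusing the fact names from this log (the actual conditional in tests/tasks/cleanup.yml:40 is not visible here, so this is only an illustration, not the repository's task):

# Illustrative only -- the real task lives at tests/tasks/cleanup.yml:40 and
# its exact condition is not shown in this log.
- name: Debug why list of unused disks has changed
  debug:
    msg: "Unused disks changed from {{ unused_disks_before }} to {{ unused_disks }}"
  # Skipped (as in the run above) when the scan after cleanup returns the same
  # disks that were recorded before the test ran.
  when: unused_disks != unused_disks_before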
"skip_reason": "Conditional result was False" } TASK [Debug why list of unused disks has changed] ****************************** task path: /WORKDIR/git-module_workd5cejf_z/tests/tasks/cleanup.yml:40 Saturday 13 July 2024 22:12:31 +0000 (0:00:00.016) 0:01:24.230 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } META: ran handlers META: ran handlers PLAY RECAP ********************************************************************* sut : ok=123 changed=5 unreachable=0 failed=0 skipped=85 rescued=0 ignored=0 Saturday 13 July 2024 22:12:31 +0000 (0:00:00.008) 0:01:24.238 ********* =============================================================================== fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state -- 10.39s /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 8.70s /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 fedora.linux_system_roles.storage : Get required packages --------------- 6.02s /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 linux-system-roles.snapshot : Ensure required packages are installed ---- 4.75s /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:6 fedora.linux_system_roles.storage : Get required packages --------------- 3.85s /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 fedora.linux_system_roles.storage : Make sure blivet is available ------- 2.42s /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 fedora.linux_system_roles.storage : Make sure required packages are installed --- 2.40s /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 linux-system-roles.snapshot : Ensure required packages are installed ---- 2.39s /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:6 fedora.linux_system_roles.storage : Make sure blivet is available ------- 2.38s /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Ensure test packages ---------------------------------------------------- 2.38s /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml:14 ----------------- linux-system-roles.snapshot : Ensure required packages are installed ---- 2.38s /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:6 linux-system-roles.snapshot : Ensure required packages are installed ---- 2.38s /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:6 Ensure test packages ---------------------------------------------------- 2.37s /WORKDIR/git-module_workd5cejf_z/tests/get_unused_disk.yml:14 ----------------- fedora.linux_system_roles.storage : Make sure required packages are installed --- 2.37s /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 fedora.linux_system_roles.storage : Make sure required packages are installed --- 2.37s 
/WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 linux-system-roles.snapshot : Ensure required packages are installed ---- 2.36s /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:6 linux-system-roles.snapshot : Ensure required packages are installed ---- 2.36s /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:6 fedora.linux_system_roles.storage : Make sure blivet is available ------- 2.35s /WORKDIR/git-module_workd5cejf_z/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 linux-system-roles.snapshot : Run snapshot module snapshot -------------- 1.85s /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:15 linux-system-roles.snapshot : Run snapshot module extend ---------------- 1.65s /WORKDIR/git-module_workd5cejf_z/tests/roles/linux-system-roles.snapshot/tasks/main.yml:15 ---^---^---^---^---^--- # STDERR: ---v---v---v---v---v--- [WARNING]: The value 15 (type int) in a string field was converted to '15' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change. [WARNING]: The value 30 (type int) in a string field was converted to '30' (type string). If this does not look like what you expect, quote the entire value to ensure it does not change. ---^---^---^---^---^---
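The two STDERR warnings mean that the integer values 15 and 30 were passed somewhere in the play to fields declared as strings and were silently coerced to '15' and '30'; the log does not show which role or module options received them. As the warning text itself suggests, quoting the values in the calling playbook removes the ambiguity. A minimal sketch of that fix, using a hypothetical string-typed parameter name purely for illustration (linux-system-roles.snapshot is the role exercised by this test, per the task paths above):

# Hypothetical option name -- this log does not identify the field that
# received the values 15 and 30.
# Unquoted, YAML parses the value as an int and Ansible warns while coercing it:
#   example_percent_space_required: 30
# Quoted, it is already a string, so no conversion (and no warning) occurs:
- name: Invoke the snapshot role with the value quoted
  vars:
    example_percent_space_required: "30"
  include_role:
    name: linux-system-roles.snapshot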