# STDOUT: ---v---v---v---v---v---
ansible-playbook 2.9.27
  config file = /etc/ansible/ansible.cfg
  configured module search path = ['/home/jenkins/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /opt/ansible-2.9/lib/python3.9/site-packages/ansible
  executable location = /opt/ansible-2.9/bin/ansible-playbook
  python version = 3.9.18 (main, Sep 7 2023, 00:00:00) [GCC 11.4.1 20230605 (Red Hat 11.4.1-2)]
Using /etc/ansible/ansible.cfg as config file
statically imported: /WORKDIR/git-weekly-cizhupc3wu/tests/create-test-file.yml
statically imported: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-data-preservation.yml
statically imported: /WORKDIR/git-weekly-cizhupc3wu/tests/create-test-file.yml
statically imported: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-data-preservation.yml
statically imported: /WORKDIR/git-weekly-cizhupc3wu/tests/create-test-file.yml
statically imported: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-data-preservation.yml
statically imported: /WORKDIR/git-weekly-cizhupc3wu/tests/create-test-file.yml
statically imported: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-data-preservation.yml
statically imported: /WORKDIR/git-weekly-cizhupc3wu/tests/create-test-file.yml
statically imported: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-data-preservation.yml
statically imported: /WORKDIR/git-weekly-cizhupc3wu/tests/create-test-file.yml
statically imported: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-data-preservation.yml
Skipping callback 'actionable', as we already have a stdout callback.
Skipping callback 'counter_enabled', as we already have a stdout callback.
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'full_skip', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'null', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
Skipping callback 'selective', as we already have a stdout callback.
Skipping callback 'skippy', as we already have a stdout callback.
Skipping callback 'stderr', as we already have a stdout callback.
Skipping callback 'unixy', as we already have a stdout callback.
Skipping callback 'yaml', as we already have a stdout callback.
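For orientation, the play that follows exercises the linux-system-roles.storage role against a disk volume that has encryption enabled but no key, with safe mode on, and expects the role to raise an error rather than create the volume. A minimal sketch of that kind of role invocation, reconstructed from the storage_volumes, storage_pools, and storage_safe_mode values printed further down in this log, not the literal contents of tests_luks.yml:

- hosts: all
  roles:
    - role: linux-system-roles.storage
      vars:
        # Illustrative reconstruction only; variable values mirror what the
        # test log prints below (storage_safe_mode_global, storage_pools_global,
        # and the "Show storage_volumes" output), not the actual test playbook.
        storage_safe_mode: true
        storage_pools: []
        storage_volumes:
          - name: foo
            type: disk
            disks: ["sda"]
            mount_point: /opt/test1
            encryption: true
            # no encryption_password supplied -- the test expects the role to fail

The "Test for correct handling of new encrypted volume w/ no key" and "Verify role raises correct error" tasks below are what check this failure path.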
PLAYBOOK: tests_luks.yml *******************************************************
1 plays in /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml

PLAY [Test LUKS] ***************************************************************

TASK [Gathering Facts] *********************************************************
task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:2
Sunday 09 June 2024 04:15:02 +0000 (0:00:00.014) 0:00:00.014 ***********
ok: [sut]
META: ran handlers

TASK [Enable FIPS mode] ********************************************************
task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:21
Sunday 09 June 2024 04:15:03 +0000 (0:00:00.747) 0:00:00.762 ***********
skipping: [sut] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reboot] ******************************************************************
task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:25
Sunday 09 June 2024 04:15:03 +0000 (0:00:00.020) 0:00:00.782 ***********
skipping: [sut] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Ensure dracut-fips] ******************************************************
task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:35
Sunday 09 June 2024 04:15:03 +0000 (0:00:00.019) 0:00:00.801 ***********
skipping: [sut] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Configure boot for FIPS] *************************************************
task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:42
Sunday 09 June 2024 04:15:03 +0000 (0:00:00.019) 0:00:00.821 ***********
skipping: [sut] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reboot] ******************************************************************
task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:51
Sunday 09 June 2024 04:15:03 +0000 (0:00:00.018) 0:00:00.840 ***********
skipping: [sut] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Run the role] ************************************************************
task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:55
Sunday 09 June 2024 04:15:03 +0000 (0:00:00.019) 0:00:00.859 ***********

TASK [linux-system-roles.storage : Set platform/version specific variables] ****
task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2
Sunday 09 June 2024 04:15:03 +0000 (0:00:00.032) 0:00:00.892 ***********
included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut

TASK [linux-system-roles.storage : Ensure ansible_facts used by role] **********
task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2
Sunday 09 June 2024 04:15:03 +0000 (0:00:00.024) 0:00:00.917 ***********
skipping: [sut] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [linux-system-roles.storage : Set platform/version specific variables] ****
task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8
Sunday 09 June 2024 04:15:03 +0000 (0:00:00.019) 0:00:00.937 ***********
skipping: [sut] => (item=RedHat.yml) => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [sut] => (item=CentOS.yml) => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [sut] => (item=CentOS_9.yml) => {
"ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:15:03 +0000 (0:00:00.036) 0:00:00.973 *********** ok: [sut] => { "changed": false, "stat": { "exists": false } } TASK [linux-system-roles.storage : Set flag to indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:15:03 +0000 (0:00:00.241) 0:00:01.215 *********** ok: [sut] => { "ansible_facts": { "__storage_is_ostree": false }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:15:03 +0000 (0:00:00.020) 0:00:01.235 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Sunday 09 June 2024 04:15:03 +0000 (0:00:00.009) 0:00:01.245 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:15:03 +0000 (0:00:00.009) 0:00:01.254 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:15:03 +0000 (0:00:00.035) 0:00:01.289 *********** changed: [sut] => { "changed": true, "rc": 0, "results": [ "Installed: libblockdev-crypto-2.28-10.el9.x86_64", "Installed: ndctl-libs-71.1-8.el9.x86_64", "Installed: vdo-8.2.2.2-1.el9.x86_64", "Installed: libblockdev-dm-2.28-10.el9.x86_64", "Installed: libblockdev-utils-2.28-10.el9.x86_64", "Installed: kmod-kvdo-8.2.3.3-125.el9.x86_64", "Installed: libblockdev-fs-2.28-10.el9.x86_64", "Installed: libblockdev-kbd-2.28-10.el9.x86_64", "Installed: lsof-4.94.0-3.el9.x86_64", "Installed: libbytesize-2.5-3.el9.x86_64", "Installed: python3-blivet-1:3.6.0-14.el9.noarch", "Installed: libblockdev-loop-2.28-10.el9.x86_64", "Installed: python3-blockdev-2.28-10.el9.x86_64", "Installed: daxctl-libs-71.1-8.el9.x86_64", "Installed: 
lvm2-9:2.03.23-2.el9.x86_64", "Installed: libblockdev-lvm-2.28-10.el9.x86_64", "Installed: python3-bytesize-2.5-3.el9.x86_64", "Installed: lvm2-libs-9:2.03.23-2.el9.x86_64", "Installed: libblockdev-mdraid-2.28-10.el9.x86_64", "Installed: volume_key-libs-0.3.12-15.el9.x86_64", "Installed: libblockdev-mpath-2.28-10.el9.x86_64", "Installed: device-mapper-event-9:1.02.197-2.el9.x86_64", "Installed: python3-pyparted-1:3.12.0-1.el9.x86_64", "Installed: mdadm-4.3-2.el9.x86_64", "Installed: libblockdev-nvdimm-2.28-10.el9.x86_64", "Installed: device-mapper-event-libs-9:1.02.197-2.el9.x86_64", "Installed: libaio-0.3.111-13.el9.x86_64", "Installed: device-mapper-multipath-0.8.7-27.el9.x86_64", "Installed: libblockdev-part-2.28-10.el9.x86_64", "Installed: blivet-data-1:3.6.0-14.el9.noarch", "Installed: device-mapper-multipath-libs-0.8.7-27.el9.x86_64", "Installed: libblockdev-2.28-10.el9.x86_64", "Installed: device-mapper-persistent-data-1.0.9-2.el9.x86_64", "Installed: ndctl-71.1-8.el9.x86_64", "Installed: libblockdev-swap-2.28-10.el9.x86_64" ] } lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:15:51 +0000 (0:00:47.668) 0:00:48.958 *********** ok: [sut] => { "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined" } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:15:51 +0000 (0:00:00.019) 0:00:48.977 *********** ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:15:51 +0000 (0:00:00.021) 0:00:48.999 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:15:52 +0000 (0:00:00.480) 0:00:49.480 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:15:52 +0000 (0:00:00.034) 0:00:49.514 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 04:15:52 +0000 (0:00:00.009) 0:00:49.523 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:15:52 +0000 (0:00:00.010) 0:00:49.534 *********** TASK 
[linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:15:52 +0000 (0:00:00.009) 0:00:49.543 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kpartx TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:15:53 +0000 (0:00:00.872) 0:00:50.416 *********** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": 
"inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { 
"name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, 
"sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", 
"status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { 
"name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": 
"systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:15:54 +0000 (0:00:01.549) 0:00:51.966 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:15:54 +0000 (0:00:00.021) 0:00:51.987 *********** TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:15:54 +0000 (0:00:00.013) 0:00:52.000 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:98 Sunday 09 June 2024 04:15:54 +0000 (0:00:00.315) 0:00:52.316 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:15:54 +0000 (0:00:00.013) 0:00:52.329 *********** TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:116 Sunday 09 June 2024 04:15:54 +0000 (0:00:00.011) 0:00:52.340 *********** ok: [sut] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:125 Sunday 09 June 2024 04:15:55 +0000 (0:00:00.013) 0:00:52.354 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:129 Sunday 09 June 2024 04:15:55 +0000 (0:00:00.014) 0:00:52.368 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:145 Sunday 09 June 2024 04:15:55 +0000 (0:00:00.013) 0:00:52.382 *********** TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:157 Sunday 09 June 2024 04:15:55 +0000 (0:00:00.012) 0:00:52.394 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:162 Sunday 09 June 2024 04:15:55 +0000 (0:00:00.012) 0:00:52.406 *********** TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Sunday 09 June 2024 04:15:55 +0000 (0:00:00.012) 0:00:52.418 *********** TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:189 Sunday 09 June 2024 04:15:55 +0000 (0:00:00.011) 0:00:52.430 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:197 Sunday 09 June 2024 04:15:55 +0000 (0:00:00.013) 0:00:52.443 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906364.359217, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1716902420.402, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 2097284, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1716902144.521, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3342962587", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:202 Sunday 09 June 2024 04:15:55 +0000 (0:00:00.190) 0:00:52.633 *********** TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:224 Sunday 09 June 2024 04:15:55 +0000 (0:00:00.015) 0:00:52.648 *********** ok: [sut] TASK [Get unused disks] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:59 Sunday 09 June 2024 04:15:55 +0000 (0:00:00.605) 0:00:53.254 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/get_unused_disk.yml for sut TASK [Ensure test packages] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/get_unused_disk.yml:2 Sunday 09 June 2024 04:15:55 +0000 (0:00:00.021) 
0:00:53.275 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: util-linux-core TASK [Find unused disks in the system] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/get_unused_disk.yml:16 Sunday 09 June 2024 04:15:56 +0000 (0:00:00.859) 0:00:54.135 *********** ok: [sut] => { "changed": false, "disks": [ "sda" ] } TASK [Debug why there are no unused disks] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/get_unused_disk.yml:25 Sunday 09 June 2024 04:15:57 +0000 (0:00:00.257) 0:00:54.392 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set unused_disks if necessary] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/get_unused_disk.yml:34 Sunday 09 June 2024 04:15:57 +0000 (0:00:00.012) 0:00:54.405 *********** ok: [sut] => { "ansible_facts": { "unused_disks": [ "sda" ] }, "changed": false } TASK [Exit playbook when there's not enough unused disks in the system] ******** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/get_unused_disk.yml:39 Sunday 09 June 2024 04:15:57 +0000 (0:00:00.014) 0:00:54.419 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print unused disks] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/get_unused_disk.yml:44 Sunday 09 June 2024 04:15:57 +0000 (0:00:00.012) 0:00:54.432 *********** ok: [sut] => { "unused_disks": [ "sda" ] } TASK [Test for correct handling of new encrypted volume w/ no key] ************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:68 Sunday 09 June 2024 04:15:57 +0000 (0:00:00.013) 0:00:54.445 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml for sut TASK [Store global variable value copy] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:4 Sunday 09 June 2024 04:15:57 +0000 (0:00:00.019) 0:00:54.465 *********** ok: [sut] => { "ansible_facts": { "storage_pools_global": [], "storage_safe_mode_global": true, "storage_volumes_global": [] }, "changed": false } TASK [Verify role raises correct error] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:10 Sunday 09 June 2024 04:15:57 +0000 (0:00:00.014) 0:00:54.479 *********** TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Sunday 09 June 2024 04:15:57 +0000 (0:00:00.018) 0:00:54.498 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Sunday 09 June 2024 04:15:57 +0000 (0:00:00.016) 0:00:54.514 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Sunday 09 June 2024 04:15:57 +0000 (0:00:00.014) 0:00:54.528 *********** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, 
"item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:15:57 +0000 (0:00:00.052) 0:00:54.581 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set flag to indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:15:57 +0000 (0:00:00.012) 0:00:54.594 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:15:57 +0000 (0:00:00.012) 0:00:54.606 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Sunday 09 June 2024 04:15:57 +0000 (0:00:00.011) 0:00:54.617 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:15:57 +0000 (0:00:00.010) 0:00:54.628 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:15:57 +0000 (0:00:00.025) 0:00:54.653 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:15:58 +0000 (0:00:00.856) 
0:00:55.510 *********** ok: [sut] => { "storage_pools": [] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:15:58 +0000 (0:00:00.015) 0:00:55.526 *********** ok: [sut] => { "storage_volumes": [ { "disks": [ "sda" ], "encryption": true, "mount_point": "/opt/test1", "name": "foo", "type": "disk" } ] } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:15:58 +0000 (0:00:00.015) 0:00:55.542 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "cryptsetup" ], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:15:59 +0000 (0:00:01.068) 0:00:56.610 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:15:59 +0000 (0:00:00.021) 0:00:56.632 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 04:15:59 +0000 (0:00:00.011) 0:00:56.644 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:15:59 +0000 (0:00:00.012) 0:00:56.656 *********** TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:15:59 +0000 (0:00:00.012) 0:00:56.668 *********** changed: [sut] => { "changed": true, "rc": 0, "results": [ "Installed: cryptsetup-2.7.2-1.el9.x86_64" ] } lsrpackages: cryptsetup kpartx TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:16:00 +0000 (0:00:01.679) 0:00:58.348 *********** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", 
"source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": 
"man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { 
"name": "raid-check.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", 
"status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, 
"systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", 
"source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:16:02 +0000 (0:00:01.479) 0:00:59.828 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:16:02 +0000 (0:00:00.021) 0:00:59.849 *********** TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task 
path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:16:02 +0000 (0:00:00.012) 0:00:59.862 *********** fatal: [sut]: FAILED! => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } MSG: encrypted volume 'foo' missing key/password TASK [linux-system-roles.storage : Failed message] ***************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:106 Sunday 09 June 2024 04:16:03 +0000 (0:00:00.891) 0:01:00.753 *********** fatal: [sut]: FAILED! => { "changed": false } MSG: {'msg': "encrypted volume 'foo' missing key/password", 'changed': False, 'actions': [], 'leaves': [], 'mounts': [], 'crypts': [], 'pools': [], 'volumes': [], 'packages': [], 'failed': True, 'invocation': {'module_args': {'pools': [], 'volumes': [{'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'foo', 'raid_level': None, 'size': 10737418240, 'state': 'present', 'type': 'disk', 'disks': ['sda'], 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_stripe_size': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': []}], 'use_partitions': None, 'disklabel_type': None, 'pool_defaults': {'state': 'present', 'type': 'lvm', 'disks': [], 'volumes': [], 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'shared': False}, 'volume_defaults': {'state': 'present', 'type': 'lvm', 'size': 0, 'disks': [], 'fs_type': 'xfs', 'fs_label': '', 'fs_create_options': '', 'fs_overwrite_existing': True, 'mount_point': '', 'mount_options': 'defaults', 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_stripe_size': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': []}, 'safe_mode': True, 'packages_only': False, 'diskvolume_mkfs_option_map': {}}}, '_ansible_no_log': False} TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:16:03 +0000 (0:00:00.016) 0:01:00.769 *********** TASK [Check that we failed in the role] **************************************** task path: 
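For anyone reproducing this failure outside the test harness, a minimal sketch of the kind of play that triggers it follows. Only the storage_volumes keys shown in the "Show storage_volumes" task output above are taken from this run; the play scaffolding (hosts, become) is a placeholder, and the role runs with safe_mode: true, as the module arguments in the failed message above show.

- hosts: all          # placeholder inventory group
  become: true        # managing disks requires root
  roles:
    - linux-system-roles.storage
  vars:
    storage_volumes:
      - name: foo
        type: disk
        disks:
          - sda
        mount_point: /opt/test1
        encryption: true
        # No encryption_password or encryption_key is given, so the blivet
        # module refuses to create the LUKS layer and fails with
        # "encrypted volume 'foo' missing key/password", as recorded above.

The verification tasks that follow assert that the role failed and that the blivet output and error message match this expectation.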
/WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:29 Sunday 09 June 2024 04:16:03 +0000 (0:00:00.011) 0:01:00.780 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the blivet output and error message are correct] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:34 Sunday 09 June 2024 04:16:03 +0000 (0:00:00.014) 0:01:00.795 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify correct exception or error message] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:45 Sunday 09 June 2024 04:16:03 +0000 (0:00:00.017) 0:01:00.813 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Create an encrypted disk volume w/ default fs] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:83 Sunday 09 June 2024 04:16:03 +0000 (0:00:00.012) 0:01:00.825 *********** TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Sunday 09 June 2024 04:16:03 +0000 (0:00:00.027) 0:01:00.852 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Sunday 09 June 2024 04:16:03 +0000 (0:00:00.017) 0:01:00.870 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Sunday 09 June 2024 04:16:03 +0000 (0:00:00.057) 0:01:00.927 *********** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:16:03 +0000 (0:00:00.032) 0:01:00.960 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set flag to 
indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:16:03 +0000 (0:00:00.012) 0:01:00.973 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:16:03 +0000 (0:00:00.015) 0:01:00.988 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Sunday 09 June 2024 04:16:03 +0000 (0:00:00.012) 0:01:01.001 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:16:03 +0000 (0:00:00.012) 0:01:01.013 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:16:03 +0000 (0:00:00.023) 0:01:01.037 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:16:04 +0000 (0:00:00.868) 0:01:01.905 *********** ok: [sut] => { "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined" } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:16:04 +0000 (0:00:00.013) 0:01:01.919 *********** ok: [sut] => { "storage_volumes": [ { "disks": [ "sda" ], "encryption": true, "encryption_password": "yabbadabbadoo", "mount_point": "/opt/test1", "name": "foo", "type": "disk" } ] } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:16:04 +0000 (0:00:00.016) 0:01:01.935 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "cryptsetup" ], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:16:05 +0000 (0:00:00.848) 0:01:02.783 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: 
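The second attempt, shown in the "Show storage_volumes" output above, differs from the failed one only in supplying a passphrase. As a sketch, the only change to the variables is the added encryption_password key (the value is the throwaway string used by this test run; a real deployment would take it from Ansible Vault or a key file rather than plain text):

storage_volumes:
  - name: foo
    type: disk
    disks:
      - sda
    mount_point: /opt/test1
    encryption: true
    encryption_password: yabbadabbadoo   # throwaway value from this test run

With the passphrase present, the run below records blivet creating a LUKS format on /dev/sda, opening it as a /dev/mapper/luks-<UUID> device, putting xfs on the mapped device, and mounting it at /opt/test1.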
/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:16:05 +0000 (0:00:00.023) 0:01:02.807 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 04:16:05 +0000 (0:00:00.021) 0:01:02.828 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:16:05 +0000 (0:00:00.013) 0:01:02.842 *********** TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:16:05 +0000 (0:00:00.012) 0:01:02.854 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup kpartx TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:16:06 +0000 (0:00:00.856) 0:01:03.710 *********** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", 
"state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" 
}, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { 
"name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, 
"selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": 
"systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": 
"systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:16:07 +0000 (0:00:01.457) 0:01:05.167 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:16:07 +0000 (0:00:00.019) 0:01:05.186 *********** TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:16:07 +0000 (0:00:00.011) 0:01:05.198 *********** changed: [sut] => { "actions": [ { "action": "create format", "device": "/dev/sda", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "fs_type": "xfs" } ], "changed": true, "crypts": [ { "backing_device": "/dev/sda", "name": "luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "password": "-", "state": "present" } ], "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "state": "mounted" } ], "packages": [ "xfsprogs", "cryptsetup" ], "pools": [], "volumes": [ { "_device": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "_raw_device": 
"/dev/sda", "_raw_kernel_device": "/dev/sda", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 10737418240, "state": "present", "thin": null, "thin_pool_name": null, "thin_pool_size": null, "type": "disk", "vdo_pool_size": null } ] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:98 Sunday 09 June 2024 04:16:18 +0000 (0:00:10.586) 0:01:15.784 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:16:18 +0000 (0:00:00.012) 0:01:15.796 *********** TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:116 Sunday 09 June 2024 04:16:18 +0000 (0:00:00.011) 0:01:15.808 *********** ok: [sut] => { "blivet_output": { "actions": [ { "action": "create format", "device": "/dev/sda", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "fs_type": "xfs" } ], "changed": true, "crypts": [ { "backing_device": "/dev/sda", "name": "luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "password": "-", "state": "present" } ], "failed": false, "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "state": "mounted" } ], "packages": [ "xfsprogs", "cryptsetup" ], "pools": [], "volumes": [ { "_device": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "_raw_device": "/dev/sda", "_raw_kernel_device": "/dev/sda", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": 
"uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 10737418240, "state": "present", "thin": null, "thin_pool_name": null, "thin_pool_size": null, "type": "disk", "vdo_pool_size": null } ] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:125 Sunday 09 June 2024 04:16:18 +0000 (0:00:00.015) 0:01:15.823 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:129 Sunday 09 June 2024 04:16:18 +0000 (0:00:00.014) 0:01:15.838 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [ { "_device": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "_raw_device": "/dev/sda", "_raw_kernel_device": "/dev/sda", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 10737418240, "state": "present", "thin": null, "thin_pool_name": null, "thin_pool_size": null, "type": "disk", "vdo_pool_size": null } ] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:145 Sunday 09 June 2024 04:16:18 +0000 (0:00:00.015) 0:01:15.853 *********** TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:157 Sunday 09 June 2024 04:16:18 +0000 (0:00:00.012) 0:01:15.866 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:162 Sunday 09 June 2024 04:16:19 +0000 (0:00:00.659) 0:01:16.525 *********** changed: [sut] => (item={'src': '/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, 
"fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621" } TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Sunday 09 June 2024 04:16:19 +0000 (0:00:00.281) 0:01:16.807 *********** skipping: [sut] => (item={'src': '/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:189 Sunday 09 June 2024 04:16:19 +0000 (0:00:00.019) 0:01:16.826 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:197 Sunday 09 June 2024 04:16:19 +0000 (0:00:00.476) 0:01:17.302 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906364.359217, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1716902420.402, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 2097284, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1716902144.521, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3342962587", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:202 Sunday 09 June 2024 04:16:20 +0000 (0:00:00.177) 0:01:17.480 *********** changed: [sut] => (item={'backing_device': '/dev/sda', 'name': 'luks-7b2ca059-4e70-41d0-a900-338aa36c8621', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sda", "name": "luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "password": "-", "state": "present" } } MSG: line added TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:224 Sunday 09 June 2024 04:16:20 +0000 (0:00:00.232) 0:01:17.712 *********** ok: [sut] TASK [Verify role results] 
***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:95 Sunday 09 June 2024 04:16:20 +0000 (0:00:00.627) 0:01:18.339 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:2 Sunday 09 June 2024 04:16:21 +0000 (0:00:00.026) 0:01:18.366 *********** skipping: [sut] => {} TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:7 Sunday 09 June 2024 04:16:21 +0000 (0:00:00.013) 0:01:18.379 *********** ok: [sut] => { "_storage_volumes_list": [ { "_device": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "_raw_device": "/dev/sda", "_raw_kernel_device": "/dev/sda", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 10737418240, "state": "present", "thin": null, "thin_pool_name": null, "thin_pool_size": null, "type": "disk", "vdo_pool_size": null } ] } TASK [Collect info about the volumes.] 
***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:15 Sunday 09 June 2024 04:16:21 +0000 (0:00:00.016) 0:01:18.395 *********** ok: [sut] => { "changed": false, "info": { "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621": { "fstype": "xfs", "label": "", "name": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "size": "10G", "type": "crypt", "uuid": "f2403da8-b341-4b12-b63f-465ee71d6b73" }, "/dev/sda": { "fstype": "crypto_LUKS", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "7b2ca059-4e70-41d0-a900-338aa36c8621" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "xfs", "label": "", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "82a26900-2b36-49b9-bc49-f4d2f0bb14b8" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:20 Sunday 09 June 2024 04:16:21 +0000 (0:00:00.264) 0:01:18.659 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002916", "end": "2024-06-09 04:16:21.549205", "rc": 0, "start": "2024-06-09 04:16:21.546289" } STDOUT: # # /etc/fstab # Created by anaconda on Tue May 28 13:15:44 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=82a26900-2b36-49b9-bc49-f4d2f0bb14b8 / xfs defaults 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621 /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:25 Sunday 09 June 2024 04:16:21 +0000 (0:00:00.260) 0:01:18.920 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.006004", "end": "2024-06-09 04:16:21.724608", "failed_when_result": false, "rc": 0, "start": "2024-06-09 04:16:21.718604" } STDOUT: luks-7b2ca059-4e70-41d0-a900-338aa36c8621 /dev/sda - TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:34 Sunday 09 June 2024 04:16:21 +0000 (0:00:00.174) 0:01:19.094 *********** TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:44 Sunday 09 June 2024 04:16:21 +0000 (0:00:00.011) 0:01:19.105 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml for sut TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:2 Sunday 09 June 2024 04:16:21 +0000 (0:00:00.023) 0:01:19.129 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:21 Sunday 09 June 2024 04:16:21 +0000 (0:00:00.015) 0:01:19.145 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml for sut TASK [Get expected mount device based on device type] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:7 
Sunday 09 June 2024 04:16:21 +0000 (0:00:00.057) 0:01:19.202 *********** ok: [sut] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:16 Sunday 09 June 2024 04:16:21 +0000 (0:00:00.015) 0:01:19.217 *********** ok: [sut] => { "ansible_facts": { "storage_test_mount_device_matches": [ { "block_available": 2574562, "block_size": 4096, "block_total": 2600960, "block_used": 26398, "device": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "fstype": "xfs", "inode_available": 5234685, "inode_total": 5234688, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 10545405952, "size_total": 10653532160, "uuid": "f2403da8-b341-4b12-b63f-465ee71d6b73" } ], "storage_test_mount_expected_match_count": "1", "storage_test_mount_point_matches": [ { "block_available": 2574562, "block_size": 4096, "block_total": 2600960, "block_used": 26398, "device": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "fstype": "xfs", "inode_available": 5234685, "inode_total": 5234688, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 10545405952, "size_total": 10653532160, "uuid": "f2403da8-b341-4b12-b63f-465ee71d6b73" } ], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:38 Sunday 09 June 2024 04:16:21 +0000 (0:00:00.020) 0:01:19.238 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:51 Sunday 09 June 2024 04:16:21 +0000 (0:00:00.013) 0:01:19.251 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the current mount state by mount point] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:63 Sunday 09 June 2024 04:16:21 +0000 (0:00:00.037) 0:01:19.289 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:71 Sunday 09 June 2024 04:16:21 +0000 (0:00:00.016) 0:01:19.305 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:83 Sunday 09 June 2024 04:16:21 +0000 (0:00:00.012) 0:01:19.318 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:95 Sunday 09 June 2024 04:16:21 +0000 (0:00:00.012) 0:01:19.330 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the mount fs type] ************************************************ task 
path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:110 Sunday 09 June 2024 04:16:21 +0000 (0:00:00.012) 0:01:19.342 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:122 Sunday 09 June 2024 04:16:22 +0000 (0:00:00.015) 0:01:19.358 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:128 Sunday 09 June 2024 04:16:22 +0000 (0:00:00.013) 0:01:19.372 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:134 Sunday 09 June 2024 04:16:22 +0000 (0:00:00.012) 0:01:19.385 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:146 Sunday 09 June 2024 04:16:22 +0000 (0:00:00.012) 0:01:19.397 *********** ok: [sut] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:2 Sunday 09 June 2024 04:16:22 +0000 (0:00:00.011) 0:01:19.409 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:40 Sunday 09 June 2024 04:16:22 +0000 (0:00:00.024) 0:01:19.434 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:48 Sunday 09 June 2024 04:16:22 +0000 (0:00:00.015) 0:01:19.450 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:58 Sunday 09 June 2024 04:16:22 +0000 (0:00:00.015) 0:01:19.466 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:71 Sunday 09 June 2024 04:16:22 +0000 (0:00:00.012) 
0:01:19.478 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:3 Sunday 09 June 2024 04:16:22 +0000 (0:00:00.011) 0:01:19.489 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:12 Sunday 09 June 2024 04:16:22 +0000 (0:00:00.016) 0:01:19.506 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:3 Sunday 09 June 2024 04:16:22 +0000 (0:00:00.016) 0:01:19.523 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906577.9721177, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1717906577.9721177, "dev": 5, "device_type": 2048, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 446, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/blockdevice", "mode": "0660", "mtime": 1717906577.9721177, "nlink": 1, "path": "/dev/sda", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:9 Sunday 09 June 2024 04:16:22 +0000 (0:00:00.180) 0:01:19.704 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:16 Sunday 09 June 2024 04:16:22 +0000 (0:00:00.016) 0:01:19.720 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:24 Sunday 09 June 2024 04:16:22 +0000 (0:00:00.012) 0:01:19.732 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:30 Sunday 09 June 2024 04:16:22 +0000 (0:00:00.014) 0:01:19.747 *********** ok: [sut] => { "ansible_facts": { "st_volume_type": "disk" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:34 Sunday 09 June 2024 04:16:22 +0000 (0:00:00.014) 0:01:19.762 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the 
volume's device type] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:39 Sunday 09 June 2024 04:16:22 +0000 (0:00:00.012) 0:01:19.774 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:3 Sunday 09 June 2024 04:16:22 +0000 (0:00:00.014) 0:01:19.789 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906578.3541193, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1717906578.3541193, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 681, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1717906578.3541193, "nlink": 1, "path": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:10 Sunday 09 June 2024 04:16:22 +0000 (0:00:00.183) 0:01:19.973 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:17 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.871) 0:01:20.845 *********** ok: [sut] => { "changed": false, "cmd": [ "cryptsetup", "luksDump", "/dev/sda" ], "delta": "0:00:00.006390", "end": "2024-06-09 04:16:23.651895", "rc": 0, "start": "2024-06-09 04:16:23.645505" } STDOUT: LUKS header information Version: 2 Epoch: 3 Metadata area: 16384 [bytes] Keyslots area: 16744448 [bytes] UUID: 7b2ca059-4e70-41d0-a900-338aa36c8621 Label: (no label) Subsystem: (no subsystem) Flags: (no flags) Data segments: 0: crypt offset: 16777216 [bytes] length: (whole device) cipher: aes-xts-plain64 sector: 512 [bytes] Keyslots: 0: luks2 Key: 512 bits Priority: normal Cipher: aes-xts-plain64 Cipher key: 512 bits PBKDF: argon2id Time cost: 4 Memory: 672167 Threads: 2 Salt: 0c bc 97 4c 45 fa d6 43 37 9e e4 39 c2 2c ea 5c c9 02 4d 42 06 98 30 8a 06 d5 7e 44 f9 11 fd e4 AF stripes: 4000 AF hash: sha256 Area offset:32768 [bytes] Area length:258048 [bytes] Digest ID: 0 Tokens: Digests: 0: pbkdf2 Hash: sha256 Iterations: 102721 Salt: b5 f7 e4 c9 60 87 18 2a 54 a3 51 e9 90 00 15 52 2e c7 30 84 ff bf a4 0f aa d8 7b 4d 46 01 5d 19 Digest: 70 fa 01 82 95 fe 17 24 ce 5d ff 4e 2b f6 30 7a d4 ff 7c 4e 44 0d 3c 03 82 2c c3 1e 1c 38 2e 2b TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:23 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.177) 0:01:21.022 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:32 Sunday 09 June 2024 
04:16:23 +0000 (0:00:00.017) 0:01:21.039 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:45 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.018) 0:01:21.057 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:51 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.016) 0:01:21.073 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:56 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.015) 0:01:21.089 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:69 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.012) 0:01:21.102 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:81 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.013) 0:01:21.116 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:94 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.013) 0:01:21.129 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [ "luks-7b2ca059-4e70-41d0-a900-338aa36c8621 /dev/sda -" ], "_storage_test_expected_crypttab_entries": "1", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:106 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.017) 0:01:21.147 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:114 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.014) 0:01:21.161 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:122 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.015) 0:01:21.177 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:131 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.015) 0:01:21.193 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:140 Sunday 
09 June 2024 04:16:23 +0000 (0:00:00.018) 0:01:21.211 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:8 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.011) 0:01:21.223 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:14 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.012) 0:01:21.235 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:21 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.012) 0:01:21.248 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:28 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.012) 0:01:21.260 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:35 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.012) 0:01:21.272 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:45 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.013) 0:01:21.286 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:54 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.012) 0:01:21.298 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:63 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.012) 0:01:21.311 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:72 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.012) 0:01:21.323 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:81 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.012) 0:01:21.336 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:3 Sunday 09 June 2024 04:16:23 +0000 (0:00:00.012) 0:01:21.348 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested size of the volume] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:11 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.014) 0:01:21.362 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:20 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.013) 0:01:21.376 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:28 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.013) 0:01:21.390 *********** ok: [sut] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:32 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:21.402 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:46 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.013) 0:01:21.416 *********** skipping: [sut] => {} TASK [Show test blockinfo] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:50 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.013) 0:01:21.429 *********** skipping: [sut] => {} TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:54 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.013) 0:01:21.443 *********** skipping: [sut] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:58 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.013) 0:01:21.456 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:68 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.013) 0:01:21.470 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:72 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:21.482 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:77 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:21.495 *********** skipping: [sut] => { "changed": false, "skip_reason": 
"Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:83 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:21.507 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:88 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:21.520 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:96 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:21.532 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:104 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:21.545 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:109 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.011) 0:01:21.556 *********** skipping: [sut] => {} TASK [Show volume thin pool size] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:113 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:21.569 *********** skipping: [sut] => {} TASK [Show test volume size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:117 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:21.581 *********** skipping: [sut] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:121 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.034) 0:01:21.616 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:129 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.013) 0:01:21.629 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:138 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:21.642 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:142 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:21.654 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:150 Sunday 09 June 2024 
04:16:24 +0000 (0:00:00.011) 0:01:21.666 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:156 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:21.678 *********** ok: [sut] => { "storage_test_actual_size": { "changed": false, "skip_reason": "Conditional result was False", "skipped": true } } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:160 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.015) 0:01:21.693 *********** ok: [sut] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Assert expected size is actual size] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:164 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.013) 0:01:21.707 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:5 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.013) 0:01:21.721 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:13 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:21.734 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:18 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:21.746 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:27 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:21.758 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:35 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.013) 0:01:21.772 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:41 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:21.785 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:47 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:21.797 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:27 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:21.810 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:54 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:21.822 *********** ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Create a file] *********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/create-test-file.yml:12 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.011) 0:01:21.834 *********** changed: [sut] => { "changed": true, "dest": "/opt/test1/quux", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "secontext": "unconfined_u:object_r:unlabeled_t:s0", "size": 0, "state": "file", "uid": 0 } TASK [Test for correct handling of safe_mode] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:101 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.235) 0:01:22.070 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml for sut TASK [Store global variable value copy] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:4 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.024) 0:01:22.094 *********** ok: [sut] => { "ansible_facts": { "storage_pools_global": [], "storage_safe_mode_global": true, "storage_volumes_global": [] }, "changed": false } TASK [Verify role raises correct error] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:10 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.016) 0:01:22.110 *********** TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.016) 0:01:22.127 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.017) 0:01:22.145 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.014) 0:01:22.159 *********** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ 
"/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.032) 0:01:22.192 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set flag to indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:22.205 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:22.218 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.012) 0:01:22.230 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.011) 0:01:22.242 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:16:24 +0000 (0:00:00.025) 0:01:22.267 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:16:25 +0000 (0:00:00.872) 0:01:23.139 *********** ok: [sut] => { "storage_pools": [] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:16:25 +0000 (0:00:00.016) 0:01:23.156 *********** ok: [sut] => { "storage_volumes": [ { "disks": [ "sda" ], "encryption": false, "encryption_password": "yabbadabbadoo", "mount_point": "/opt/test1", "name": "foo", "type": "disk" } ] 
} TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:16:25 +0000 (0:00:00.016) 0:01:23.173 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:16:26 +0000 (0:00:00.973) 0:01:24.146 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:16:26 +0000 (0:00:00.024) 0:01:24.171 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 04:16:26 +0000 (0:00:00.026) 0:01:24.197 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:16:26 +0000 (0:00:00.017) 0:01:24.214 *********** TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:16:26 +0000 (0:00:00.013) 0:01:24.228 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kpartx TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:16:27 +0000 (0:00:00.870) 0:01:25.099 *********** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", 
"state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": 
"running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": 
"systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": 
"static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:16:29 +0000 (0:00:01.510) 0:01:26.609 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:16:29 +0000 (0:00:00.022) 0:01:26.631 *********** TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:16:29 +0000 (0:00:00.014) 0:01:26.646 *********** fatal: [sut]: FAILED! 
=> { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } MSG: cannot remove existing formatting on device 'luks-7b2ca059-4e70-41d0-a900-338aa36c8621' in safe mode due to encryption removal TASK [linux-system-roles.storage : Failed message] ***************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:106 Sunday 09 June 2024 04:16:30 +0000 (0:00:00.985) 0:01:27.631 *********** fatal: [sut]: FAILED! => { "changed": false } MSG: {'msg': "cannot remove existing formatting on device 'luks-7b2ca059-4e70-41d0-a900-338aa36c8621' in safe mode due to encryption removal", 'changed': False, 'actions': [], 'leaves': [], 'mounts': [], 'crypts': [], 'pools': [], 'volumes': [], 'packages': [], 'failed': True, 'invocation': {'module_args': {'pools': [], 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': 0, 'encryption_luks_version': 'luks2', 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'foo', 'raid_level': None, 'size': 10720641024, 'state': 'present', 'type': 'disk', 'disks': ['sda'], 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_stripe_size': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': []}], 'use_partitions': None, 'disklabel_type': None, 'pool_defaults': {'state': 'present', 'type': 'lvm', 'disks': [], 'volumes': [], 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'shared': False}, 'volume_defaults': {'state': 'present', 'type': 'lvm', 'size': 0, 'disks': [], 'fs_type': 'xfs', 'fs_label': '', 'fs_create_options': '', 'fs_overwrite_existing': True, 'mount_point': '', 'mount_options': 'defaults', 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_stripe_size': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': []}, 'safe_mode': True, 'packages_only': False, 'diskvolume_mkfs_option_map': {}}}, '_ansible_no_log': False} TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:16:30 +0000 (0:00:00.017) 0:01:27.649 *********** TASK [Check that we failed in the role] **************************************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:29 Sunday 09 June 2024 04:16:30 +0000 (0:00:00.012) 0:01:27.661 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the blivet output and error message are correct] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:34 Sunday 09 June 2024 04:16:30 +0000 (0:00:00.014) 0:01:27.676 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify correct exception or error message] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:45 Sunday 09 June 2024 04:16:30 +0000 (0:00:00.019) 0:01:27.695 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Stat the file] *********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-data-preservation.yml:11 Sunday 09 June 2024 04:16:30 +0000 (0:00:00.012) 0:01:27.707 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906584.6981444, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1717906584.6981444, "dev": 64768, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0644", "mtime": 1717906584.6981444, "nlink": 1, "path": "/opt/test1/quux", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 0, "uid": 0, "version": "238069845", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Assert file presence] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-data-preservation.yml:16 Sunday 09 June 2024 04:16:30 +0000 (0:00:00.177) 0:01:27.885 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Remove the encryption layer] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:121 Sunday 09 June 2024 04:16:30 +0000 (0:00:00.015) 0:01:27.901 *********** TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Sunday 09 June 2024 04:16:30 +0000 (0:00:00.032) 0:01:27.933 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Sunday 09 June 2024 04:16:30 +0000 (0:00:00.019) 0:01:27.953 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Sunday 09 June 2024 04:16:30 +0000 (0:00:00.015) 0:01:27.968 *********** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => 
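
The "Stat the file" and "Assert file presence" tasks above come from the statically imported verify-data-preservation.yml and confirm that /opt/test1/quux survived the failed role run before the play moves on to "Remove the encryption layer". A minimal sketch of such a check follows, keeping the path and task names from the log; the register name and assertion message are illustrative, not taken from the actual test file:

# Sketch of a data-preservation check. Path and task names match the log;
# the register name "__quux_stat" is an assumption.
- name: Stat the file
  stat:
    path: /opt/test1/quux
  register: __quux_stat

- name: Assert file presence
  assert:
    that:
      - __quux_stat.stat.exists
    msg: "test file was not preserved across the failed role run"
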
(item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:16:30 +0000 (0:00:00.033) 0:01:28.002 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set flag to indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:16:30 +0000 (0:00:00.012) 0:01:28.015 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:16:30 +0000 (0:00:00.013) 0:01:28.028 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Sunday 09 June 2024 04:16:30 +0000 (0:00:00.011) 0:01:28.040 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:16:30 +0000 (0:00:00.011) 0:01:28.051 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:16:30 +0000 (0:00:00.024) 0:01:28.076 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:16:31 +0000 (0:00:00.858) 0:01:28.935 *********** ok: [sut] => { "storage_pools": "VARIABLE IS NOT DEFINED!: 
'storage_pools' is undefined" } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:16:31 +0000 (0:00:00.013) 0:01:28.948 *********** ok: [sut] => { "storage_volumes": [ { "disks": [ "sda" ], "encryption": false, "encryption_password": "yabbadabbadoo", "mount_point": "/opt/test1", "name": "foo", "type": "disk" } ] } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:16:31 +0000 (0:00:00.014) 0:01:28.963 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:16:32 +0000 (0:00:00.942) 0:01:29.905 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:16:32 +0000 (0:00:00.023) 0:01:29.929 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 04:16:32 +0000 (0:00:00.012) 0:01:29.941 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:16:32 +0000 (0:00:00.012) 0:01:29.954 *********** TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:16:32 +0000 (0:00:00.011) 0:01:29.966 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kpartx TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:16:33 +0000 (0:00:00.864) 0:01:30.830 *********** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", 
"status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": 
"dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", 
"state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", 
"state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, 
"system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, 
"systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", 
"source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:16:34 +0000 (0:00:01.459) 0:01:32.290 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:16:34 +0000 (0:00:00.019) 0:01:32.310 *********** TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task 
path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:16:34 +0000 (0:00:00.011) 0:01:32.321 *********** changed: [sut] => { "actions": [ { "action": "destroy format", "device": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "luks" }, { "action": "create format", "device": "/dev/sda", "fs_type": "xfs" } ], "changed": true, "crypts": [ { "backing_device": "/dev/sda", "name": "luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "password": "-", "state": "absent" } ], "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "state": "absent" }, { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=faa4e300-f20d-4b0f-b1d3-e57a1eb71e00", "state": "mounted" } ], "packages": [ "xfsprogs" ], "pools": [], "volumes": [ { "_device": "/dev/sda", "_kernel_device": "/dev/sda", "_mount_id": "UUID=faa4e300-f20d-4b0f-b1d3-e57a1eb71e00", "_raw_device": "/dev/sda", "_raw_kernel_device": "/dev/sda", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 10720641024, "state": "present", "thin": null, "thin_pool_name": null, "thin_pool_size": null, "type": "disk", "vdo_pool_size": null } ] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:98 Sunday 09 June 2024 04:16:36 +0000 (0:00:01.468) 0:01:33.790 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:16:36 +0000 (0:00:00.012) 0:01:33.803 *********** TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:116 Sunday 09 June 2024 04:16:36 +0000 (0:00:00.011) 0:01:33.814 *********** ok: [sut] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "fs_type": null }, { "action": "destroy 
format", "device": "/dev/sda", "fs_type": "luks" }, { "action": "create format", "device": "/dev/sda", "fs_type": "xfs" } ], "changed": true, "crypts": [ { "backing_device": "/dev/sda", "name": "luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "password": "-", "state": "absent" } ], "failed": false, "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "state": "absent" }, { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=faa4e300-f20d-4b0f-b1d3-e57a1eb71e00", "state": "mounted" } ], "packages": [ "xfsprogs" ], "pools": [], "volumes": [ { "_device": "/dev/sda", "_kernel_device": "/dev/sda", "_mount_id": "UUID=faa4e300-f20d-4b0f-b1d3-e57a1eb71e00", "_raw_device": "/dev/sda", "_raw_kernel_device": "/dev/sda", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 10720641024, "state": "present", "thin": null, "thin_pool_name": null, "thin_pool_size": null, "type": "disk", "vdo_pool_size": null } ] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:125 Sunday 09 June 2024 04:16:36 +0000 (0:00:00.015) 0:01:33.830 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:129 Sunday 09 June 2024 04:16:36 +0000 (0:00:00.013) 0:01:33.844 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [ { "_device": "/dev/sda", "_kernel_device": "/dev/sda", "_mount_id": "UUID=faa4e300-f20d-4b0f-b1d3-e57a1eb71e00", "_raw_device": "/dev/sda", "_raw_kernel_device": "/dev/sda", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, 
"raid_stripe_size": null, "size": 10720641024, "state": "present", "thin": null, "thin_pool_name": null, "thin_pool_size": null, "type": "disk", "vdo_pool_size": null } ] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:145 Sunday 09 June 2024 04:16:36 +0000 (0:00:00.014) 0:01:33.858 *********** changed: [sut] => (item={'src': '/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'xfs'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "state": "absent" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/luks-7b2ca059-4e70-41d0-a900-338aa36c8621" } TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:157 Sunday 09 June 2024 04:16:36 +0000 (0:00:00.177) 0:01:34.036 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:162 Sunday 09 June 2024 04:16:37 +0000 (0:00:00.486) 0:01:34.523 *********** changed: [sut] => (item={'src': 'UUID=faa4e300-f20d-4b0f-b1d3-e57a1eb71e00', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=faa4e300-f20d-4b0f-b1d3-e57a1eb71e00", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "UUID=faa4e300-f20d-4b0f-b1d3-e57a1eb71e00" } TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Sunday 09 June 2024 04:16:37 +0000 (0:00:00.190) 0:01:34.713 *********** skipping: [sut] => (item={'src': 'UUID=faa4e300-f20d-4b0f-b1d3-e57a1eb71e00', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=faa4e300-f20d-4b0f-b1d3-e57a1eb71e00", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:189 Sunday 09 June 2024 04:16:37 +0000 (0:00:00.017) 0:01:34.731 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:197 
Sunday 09 June 2024 04:16:37 +0000 (0:00:00.485) 0:01:35.216 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906581.7241325, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "50b23b03e6f1d3b232c081c4d07b8090b7698901", "ctime": 1717906580.336127, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 390070530, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0600", "mtime": 1717906580.336127, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 53, "uid": 0, "version": "2841740863", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:202 Sunday 09 June 2024 04:16:38 +0000 (0:00:00.188) 0:01:35.405 *********** changed: [sut] => (item={'backing_device': '/dev/sda', 'name': 'luks-7b2ca059-4e70-41d0-a900-338aa36c8621', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sda", "name": "luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:224 Sunday 09 June 2024 04:16:38 +0000 (0:00:00.183) 0:01:35.589 *********** ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:134 Sunday 09 June 2024 04:16:38 +0000 (0:00:00.588) 0:01:36.178 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:2 Sunday 09 June 2024 04:16:38 +0000 (0:00:00.024) 0:01:36.202 *********** skipping: [sut] => {} TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:7 Sunday 09 June 2024 04:16:38 +0000 (0:00:00.034) 0:01:36.237 *********** ok: [sut] => { "_storage_volumes_list": [ { "_device": "/dev/sda", "_kernel_device": "/dev/sda", "_mount_id": "UUID=faa4e300-f20d-4b0f-b1d3-e57a1eb71e00", "_raw_device": "/dev/sda", "_raw_kernel_device": "/dev/sda", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, 
"raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 10720641024, "state": "present", "thin": null, "thin_pool_name": null, "thin_pool_size": null, "type": "disk", "vdo_pool_size": null } ] } TASK [Collect info about the volumes.] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:15 Sunday 09 June 2024 04:16:38 +0000 (0:00:00.016) 0:01:36.253 *********** ok: [sut] => { "changed": false, "info": { "/dev/sda": { "fstype": "xfs", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "faa4e300-f20d-4b0f-b1d3-e57a1eb71e00" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "xfs", "label": "", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "82a26900-2b36-49b9-bc49-f4d2f0bb14b8" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:20 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.176) 0:01:36.430 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002548", "end": "2024-06-09 04:16:39.231946", "rc": 0, "start": "2024-06-09 04:16:39.229398" } STDOUT: # # /etc/fstab # Created by anaconda on Tue May 28 13:15:44 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=82a26900-2b36-49b9-bc49-f4d2f0bb14b8 / xfs defaults 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 UUID=faa4e300-f20d-4b0f-b1d3-e57a1eb71e00 /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:25 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.173) 0:01:36.603 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003004", "end": "2024-06-09 04:16:39.408824", "failed_when_result": false, "rc": 0, "start": "2024-06-09 04:16:39.405820" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:34 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.178) 0:01:36.781 *********** TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:44 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.011) 0:01:36.793 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml for sut TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:2 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.023) 0:01:36.816 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:21 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.015) 0:01:36.832 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml for sut TASK [Get expected mount device based on device type] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:7 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.052) 0:01:36.884 *********** 
ok: [sut] => { "ansible_facts": { "storage_test_device_path": "/dev/sda" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:16 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.016) 0:01:36.901 *********** ok: [sut] => { "ansible_facts": { "storage_test_mount_device_matches": [ { "block_available": 2578630, "block_size": 4096, "block_total": 2605056, "block_used": 26426, "device": "/dev/sda", "fstype": "xfs", "inode_available": 5242877, "inode_total": 5242880, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 10562068480, "size_total": 10670309376, "uuid": "faa4e300-f20d-4b0f-b1d3-e57a1eb71e00" } ], "storage_test_mount_expected_match_count": "1", "storage_test_mount_point_matches": [ { "block_available": 2578630, "block_size": 4096, "block_total": 2605056, "block_used": 26426, "device": "/dev/sda", "fstype": "xfs", "inode_available": 5242877, "inode_total": 5242880, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 10562068480, "size_total": 10670309376, "uuid": "faa4e300-f20d-4b0f-b1d3-e57a1eb71e00" } ], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:38 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.019) 0:01:36.920 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:51 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.012) 0:01:36.933 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the current mount state by mount point] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:63 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.015) 0:01:36.949 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:71 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.014) 0:01:36.963 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:83 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.012) 0:01:36.976 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:95 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.013) 0:01:36.989 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the mount fs type] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:110 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.012) 0:01:37.001 *********** ok: [sut] => { "changed": false } MSG: All assertions passed 
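For context, the blivet actions and mount changes recorded earlier in this run (destroying the luks-7b2ca059... device and format on /dev/sda, creating xfs, and swapping the /etc/fstab entry to UUID=faa4e300-f20d-4b0f-b1d3-e57a1eb71e00) are what the storage role produces when a previously encrypted disk volume is re-specified without encryption. The sketch below is an illustrative reconstruction of that kind of invocation, assuming the role's documented storage_volumes / storage_safe_mode interface; the actual tests_luks.yml play is not reproduced in this log, so the play wording is an assumption, while the volume name, disk, filesystem type, and mount point are taken from the output above.

- name: Re-create the volume without encryption (illustrative sketch, not the original test task)
  hosts: all
  vars:
    storage_safe_mode: false        # assumed: destructive re-format must be explicitly allowed
    storage_volumes:
      - name: foo                   # volume name as reported in the output above
        type: disk
        disks:
          - sda
        fs_type: xfs
        mount_point: /opt/test1
        encryption: false           # matches the "destroy format ... luks" / "create format ... xfs" actions
  roles:
    - linux-system-roles.storage

With safe mode left enabled, the role refuses to remove existing formatting instead of re-creating the volume; that refusal path is what the "Test for correct handling of safe_mode" play later in this log exercises.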
TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:122 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.016) 0:01:37.018 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:128 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.012) 0:01:37.030 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:134 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.012) 0:01:37.042 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:146 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.012) 0:01:37.055 *********** ok: [sut] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:2 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.012) 0:01:37.067 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "UUID=faa4e300-f20d-4b0f-b1d3-e57a1eb71e00 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:40 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.025) 0:01:37.093 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:48 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.015) 0:01:37.108 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:58 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.014) 0:01:37.123 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:71 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.012) 0:01:37.136 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, 
"storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:3 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.011) 0:01:37.147 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:12 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.019) 0:01:37.166 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:3 Sunday 09 June 2024 04:16:39 +0000 (0:00:00.018) 0:01:37.185 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906596.3751905, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1717906596.3751905, "dev": 5, "device_type": 2048, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 446, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/blockdevice", "mode": "0660", "mtime": 1717906596.3751905, "nlink": 1, "path": "/dev/sda", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:9 Sunday 09 June 2024 04:16:40 +0000 (0:00:00.178) 0:01:37.363 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:16 Sunday 09 June 2024 04:16:40 +0000 (0:00:00.016) 0:01:37.380 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:24 Sunday 09 June 2024 04:16:40 +0000 (0:00:00.012) 0:01:37.392 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:30 Sunday 09 June 2024 04:16:40 +0000 (0:00:00.014) 0:01:37.407 *********** ok: [sut] => { "ansible_facts": { "st_volume_type": "disk" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:34 Sunday 09 June 2024 04:16:40 +0000 (0:00:00.013) 0:01:37.421 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:39 Sunday 09 June 2024 04:16:40 +0000 (0:00:00.012) 0:01:37.433 *********** ok: 
[sut] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:3 Sunday 09 June 2024 04:16:40 +0000 (0:00:00.014) 0:01:37.447 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:10 Sunday 09 June 2024 04:16:40 +0000 (0:00:00.012) 0:01:37.460 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:17 Sunday 09 June 2024 04:16:40 +0000 (0:00:00.865) 0:01:38.325 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:23 Sunday 09 June 2024 04:16:40 +0000 (0:00:00.012) 0:01:38.338 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:32 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.351 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:45 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.017) 0:01:38.368 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:51 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.034) 0:01:38.402 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:56 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.013) 0:01:38.415 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:69 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.428 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:81 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.440 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:94 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.453 *********** ok: [sut] 
=> { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:106 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.017) 0:01:38.470 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:114 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.018) 0:01:38.488 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:122 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.501 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:131 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.513 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:140 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.525 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:8 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.011) 0:01:38.537 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:14 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.011) 0:01:38.548 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:21 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.561 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:28 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.574 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:35 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.011) 0:01:38.585 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] 
**************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:45 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.598 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:54 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.610 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:63 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.011) 0:01:38.622 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:72 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.634 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:81 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.647 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:3 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.660 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested size of the volume] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:11 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.672 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:20 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.013) 0:01:38.686 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:28 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.698 *********** ok: [sut] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:32 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.013) 0:01:38.712 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:46 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.013) 0:01:38.726 *********** skipping: [sut] => {} TASK [Show test blockinfo] ***************************************************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:50 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.013) 0:01:38.739 *********** skipping: [sut] => {} TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:54 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.752 *********** skipping: [sut] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:58 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.764 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:68 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.777 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:72 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.790 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:77 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.802 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:83 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.815 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:88 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.011) 0:01:38.826 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:96 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.011) 0:01:38.837 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:104 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.850 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:109 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.862 *********** skipping: [sut] => {} TASK [Show volume thin pool size] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:113 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.875 *********** skipping: [sut] => {} TASK [Show test volume size] 
*************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:117 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.011) 0:01:38.886 *********** skipping: [sut] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:121 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.011) 0:01:38.897 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:129 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.011) 0:01:38.909 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:138 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.921 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:142 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.013) 0:01:38.935 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:150 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.947 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:156 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.960 *********** ok: [sut] => { "storage_test_actual_size": { "changed": false, "skip_reason": "Conditional result was False", "skipped": true } } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:160 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.013) 0:01:38.974 *********** ok: [sut] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Assert expected size is actual size] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:164 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:38.986 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:5 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.013) 0:01:39.000 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:13 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.013) 0:01:39.014 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] 
****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:18 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:39.026 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:27 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:39.039 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:35 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:39.051 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:41 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:39.064 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:47 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.012) 0:01:39.076 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:27 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.013) 0:01:39.090 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:54 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.011) 0:01:39.101 *********** ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Create a file] *********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/create-test-file.yml:12 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.011) 0:01:39.113 *********** changed: [sut] => { "changed": true, "dest": "/opt/test1/quux", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "secontext": "unconfined_u:object_r:unlabeled_t:s0", "size": 0, "state": "file", "uid": 0 } TASK [Test for correct handling of safe_mode] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:140 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.181) 0:01:39.294 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml for sut TASK [Store global variable value copy] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:4 Sunday 09 June 2024 04:16:41 +0000 (0:00:00.024) 0:01:39.319 *********** ok: [sut] => { "ansible_facts": { "storage_pools_global": [], "storage_safe_mode_global": true, "storage_volumes_global": [] }, "changed": false } TASK [Verify role raises correct error] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:10 Sunday 09 
June 2024 04:16:41 +0000 (0:00:00.015) 0:01:39.335 *********** TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Sunday 09 June 2024 04:16:42 +0000 (0:00:00.017) 0:01:39.352 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Sunday 09 June 2024 04:16:42 +0000 (0:00:00.016) 0:01:39.369 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Sunday 09 June 2024 04:16:42 +0000 (0:00:00.014) 0:01:39.383 *********** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:16:42 +0000 (0:00:00.031) 0:01:39.415 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set flag to indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:16:42 +0000 (0:00:00.013) 0:01:39.428 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:16:42 +0000 (0:00:00.036) 0:01:39.465 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Sunday 09 June 2024 04:16:42 +0000 (0:00:00.012) 0:01:39.477 *********** ok: [sut] => { "ansible_facts": { 
"_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:16:42 +0000 (0:00:00.011) 0:01:39.488 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:16:42 +0000 (0:00:00.025) 0:01:39.514 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:16:43 +0000 (0:00:00.867) 0:01:40.382 *********** ok: [sut] => { "storage_pools": [] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:16:43 +0000 (0:00:00.016) 0:01:40.398 *********** ok: [sut] => { "storage_volumes": [ { "disks": [ "sda" ], "encryption": true, "encryption_password": "yabbadabbadoo", "mount_point": "/opt/test1", "name": "foo", "type": "disk" } ] } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:16:43 +0000 (0:00:00.015) 0:01:40.414 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "cryptsetup" ], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:16:43 +0000 (0:00:00.876) 0:01:41.290 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:16:43 +0000 (0:00:00.023) 0:01:41.314 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 04:16:43 +0000 (0:00:00.011) 0:01:41.325 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:16:43 +0000 (0:00:00.013) 0:01:41.339 *********** TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:16:44 +0000 (0:00:00.011) 0:01:41.350 *********** 
ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup kpartx TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:16:44 +0000 (0:00:00.877) 0:01:42.227 *********** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": 
"dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": 
"stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", 
"status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-cryptsetup@luks\\x2d7b2ca059\\x2d4e70\\x2d41d0\\x2da900\\x2d338aa36c8621.service": { "name": "systemd-cryptsetup@luks\\x2d7b2ca059\\x2d4e70\\x2d41d0\\x2da900\\x2d338aa36c8621.service", "source": "systemd", "state": "stopped", "status": "generated" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", 
"state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": 
"targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:16:46 +0000 (0:00:01.471) 0:01:43.699 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [ "systemd-cryptsetup@luks\\x2d7b2ca059\\x2d4e70\\x2d41d0\\x2da900\\x2d338aa36c8621.service" ] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:16:46 +0000 (0:00:00.021) 0:01:43.720 *********** changed: [sut] => (item=systemd-cryptsetup@luks\x2d7b2ca059\x2d4e70\x2d41d0\x2da900\x2d338aa36c8621.service) => { "ansible_loop_var": "item", "changed": true, "item": "systemd-cryptsetup@luks\\x2d7b2ca059\\x2d4e70\\x2d41d0\\x2da900\\x2d338aa36c8621.service", "name": "systemd-cryptsetup@luks\\x2d7b2ca059\\x2d4e70\\x2d41d0\\x2da900\\x2d338aa36c8621.service", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "cryptsetup-pre.target systemd-journald.socket dev-sda.device systemd-udevd-kernel.socket \"system-systemd\\\\x2dcryptsetup.slice\"", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "cryptsetup.target umount.target \"blockdev@dev-mapper-luks\\\\x2d7b2ca059\\\\x2d4e70\\\\x2d41d0\\\\x2da900\\\\x2d338aa36c8621.target\"", "BindsTo": "dev-sda.device", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", 
"ConfigurationDirectoryMode": "0755", "Conflicts": "umount.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "no", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Cryptography Setup for luks-7b2ca059-4e70-41d0-a900-338aa36c8621", "DevicePolicy": "auto", "Documentation": "\"man:crypttab(5)\" \"man:systemd-cryptsetup-generator(8)\" \"man:systemd-cryptsetup@.service(8)\"", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup attach luks-7b2ca059-4e70-41d0-a900-338aa36c8621 /dev/sda - ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup attach luks-7b2ca059-4e70-41d0-a900-338aa36c8621 /dev/sda - ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup detach luks-7b2ca059-4e70-41d0-a900-338aa36c8621 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup detach luks-7b2ca059-4e70-41d0-a900-338aa36c8621 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/systemd-cryptsetup@luks\\x2d7b2ca059\\x2d4e70\\x2d41d0\\x2da900\\x2d338aa36c8621.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-cryptsetup@luks\\x2d7b2ca059\\x2d4e70\\x2d41d0\\x2da900\\x2d338aa36c8621.service", "IgnoreOnIsolate": "yes", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "shared", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13972", "LimitNPROCSoft": "13972", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13972", "LimitSIGPENDINGSoft": "13972", "LimitSTACK": "infinity", 
"LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"systemd-cryptsetup@luks\\\\x2d7b2ca059\\\\x2d4e70\\\\x2d41d0\\\\x2da900\\\\x2d338aa36c8621.service\"", "NeedDaemonReload": "yes", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "500", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "RequiredBy": "cryptsetup.target", "Requires": "\"system-systemd\\\\x2dcryptsetup.slice\"", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system-systemd\\x2dcryptsetup.slice", "SourcePath": "/etc/crypttab", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sun 2024-06-09 04:16:37 UTC", "StateChangeTimestampMonotonic": "538517572", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22356", "TimeoutAbortUSec": "infinity", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "infinity", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "\"blockdev@dev-mapper-luks\\\\x2d7b2ca059\\\\x2d4e70\\\\x2d41d0\\\\x2da900\\\\x2d338aa36c8621.target\"", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK 
[linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:16:46 +0000 (0:00:00.501) 0:01:44.222 *********** fatal: [sut]: FAILED! => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } MSG: cannot remove existing formatting on device 'sda' in safe mode due to adding encryption TASK [linux-system-roles.storage : Failed message] ***************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:106 Sunday 09 June 2024 04:16:47 +0000 (0:00:00.899) 0:01:45.122 *********** fatal: [sut]: FAILED! => { "changed": false } MSG: {'msg': "cannot remove existing formatting on device 'sda' in safe mode due to adding encryption", 'changed': False, 'actions': [], 'leaves': [], 'mounts': [], 'crypts': [], 'pools': [], 'volumes': [], 'packages': [], 'failed': True, 'invocation': {'module_args': {'pools': [], 'volumes': [{'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'foo', 'raid_level': None, 'size': 10737418240, 'state': 'present', 'type': 'disk', 'disks': ['sda'], 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_stripe_size': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': []}], 'use_partitions': None, 'disklabel_type': None, 'pool_defaults': {'state': 'present', 'type': 'lvm', 'disks': [], 'volumes': [], 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'shared': False}, 'volume_defaults': {'state': 'present', 'type': 'lvm', 'size': 0, 'disks': [], 'fs_type': 'xfs', 'fs_label': '', 'fs_create_options': '', 'fs_overwrite_existing': True, 'mount_point': '', 'mount_options': 'defaults', 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_stripe_size': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': []}, 'safe_mode': True, 'packages_only': False, 'diskvolume_mkfs_option_map': {}}}, '_ansible_no_log': False} TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:16:47 +0000 (0:00:00.016) 0:01:45.139 *********** changed: [sut] => (item=systemd-cryptsetup@luks\x2d7b2ca059\x2d4e70\x2d41d0\x2da900\x2d338aa36c8621.service) => { "ansible_loop_var": "item", "changed": true, "item": "systemd-cryptsetup@luks\\x2d7b2ca059\\x2d4e70\\x2d41d0\\x2da900\\x2d338aa36c8621.service", "name": "systemd-cryptsetup@luks\\x2d7b2ca059\\x2d4e70\\x2d41d0\\x2da900\\x2d338aa36c8621.service", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "no", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "systemd-cryptsetup@luks\\x2d7b2ca059\\x2d4e70\\x2d41d0\\x2da900\\x2d338aa36c8621.service", "DevicePolicy": "auto", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/etc/systemd/system/systemd-cryptsetup@luks\\x2d7b2ca059\\x2d4e70\\x2d41d0\\x2da900\\x2d338aa36c8621.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-cryptsetup@luks\\x2d7b2ca059\\x2d4e70\\x2d41d0\\x2da900\\x2d338aa36c8621.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": 
"infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "463069184", "LimitMEMLOCKSoft": "463069184", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "1073741816", "LimitNOFILESoft": "1073741816", "LimitNPROC": "13972", "LimitNPROCSoft": "13972", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13972", "LimitSIGPENDINGSoft": "13972", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadError": "org.freedesktop.systemd1.UnitMasked \"Unit systemd-cryptsetup@luks\\x2d7b2ca059\\x2d4e70\\x2d41d0\\x2da900\\x2d338aa36c8621.service is masked.\"", "LoadState": "masked", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"systemd-cryptsetup@luks\\\\x2d7b2ca059\\\\x2d4e70\\\\x2d41d0\\\\x2da900\\\\x2d338aa36c8621.service\"", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "inherit", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": 
"no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22356", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "masked", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [Check that we failed in the role] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:29 Sunday 09 June 2024 04:16:48 +0000 (0:00:00.507) 0:01:45.646 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the blivet output and error message are correct] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:34 Sunday 09 June 2024 04:16:48 +0000 (0:00:00.014) 0:01:45.661 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify correct exception or error message] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:45 Sunday 09 June 2024 04:16:48 +0000 (0:00:00.018) 0:01:45.679 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Stat the file] *********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-data-preservation.yml:11 Sunday 09 June 2024 04:16:48 +0000 (0:00:00.012) 0:01:45.691 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906601.9232123, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1717906601.9232123, "dev": 2048, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0644", "mtime": 1717906601.9232123, "nlink": 1, "path": "/opt/test1/quux", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 0, "uid": 0, "version": "1936809335", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Assert file presence] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-data-preservation.yml:16 Sunday 09 June 2024 04:16:48 +0000 (0:00:00.175) 0:01:45.867 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Add encryption to the volume] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:160 Sunday 09 June 2024 04:16:48 +0000 (0:00:00.014) 0:01:45.882 *********** TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Sunday 09 June 2024 04:16:48 +0000 (0:00:00.061) 0:01:45.943 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Sunday 09 June 2024 04:16:48 +0000 (0:00:00.017) 0:01:45.961 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Sunday 09 June 2024 04:16:48 +0000 (0:00:00.014) 0:01:45.975 *********** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:16:48 +0000 (0:00:00.034) 0:01:46.010 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set flag to indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:16:48 +0000 (0:00:00.012) 0:01:46.023 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:16:48 +0000 (0:00:00.012) 0:01:46.036 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Sunday 09 June 2024 04:16:48 +0000 (0:00:00.011) 0:01:46.047 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:16:48 +0000 (0:00:00.011) 0:01:46.058 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:16:48 +0000 (0:00:00.025) 0:01:46.083 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:16:49 +0000 (0:00:00.874) 0:01:46.958 *********** ok: [sut] => { "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined" } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:16:49 +0000 (0:00:00.014) 0:01:46.972 *********** ok: [sut] => { "storage_volumes": [ { "disks": [ "sda" ], "encryption": true, "encryption_password": "yabbadabbadoo", "mount_point": "/opt/test1", "name": "foo", "type": "disk" } ] } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:16:49 +0000 (0:00:00.014) 0:01:46.987 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "cryptsetup" ], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:16:50 +0000 (0:00:00.862) 0:01:47.849 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:16:50 +0000 (0:00:00.021) 0:01:47.871 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 04:16:50 +0000 (0:00:00.011) 0:01:47.882 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:16:50 +0000 (0:00:00.012) 0:01:47.895 *********** TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:16:50 +0000 (0:00:00.011) 0:01:47.906 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup kpartx TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:16:51 +0000 (0:00:00.867) 0:01:48.774 *********** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": 
{ "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", 
"state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": 
"indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": 
"systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK 
[linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:16:52 +0000 (0:00:01.484) 0:01:50.258 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:16:52 +0000 (0:00:00.020) 0:01:50.279 *********** TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:16:52 +0000 (0:00:00.011) 0:01:50.290 *********** changed: [sut] => { "actions": [ { "action": "destroy format", "device": "/dev/sda", "fs_type": "xfs" }, { "action": "create format", "device": "/dev/sda", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "fs_type": "xfs" } ], "changed": true, "crypts": [ { "backing_device": "/dev/sda", "name": "luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "password": "-", "state": "present" } ], "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "UUID=faa4e300-f20d-4b0f-b1d3-e57a1eb71e00", "state": "absent" }, { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "state": "mounted" } ], "packages": [ "cryptsetup", "xfsprogs" ], "pools": [], "volumes": [ { "_device": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "_raw_device": "/dev/sda", "_raw_kernel_device": "/dev/sda", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 10737418240, "state": "present", "thin": null, "thin_pool_name": null, "thin_pool_size": null, "type": "disk", "vdo_pool_size": null } ] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:98 Sunday 09 June 2024 04:17:02 +0000 (0:00:09.863) 0:02:00.154 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } 
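NOTE: the "Manage the pools and volumes to match the specified state" result above comes from applying the storage_volumes specification printed earlier in this log. A minimal sketch of an equivalent invocation, reconstructed only from the values shown in this output (the play wrapper itself is an assumption, not the test's actual source):

- hosts: all
  roles:
    - role: linux-system-roles.storage
      vars:
        storage_volumes:
          - name: foo                            # volume name shown in the log
            type: disk                           # whole-disk volume
            disks: [sda]                         # backing disk
            mount_point: /opt/test1              # mount point verified later in the log
            encryption: true                     # request a LUKS layer on top of sda
            encryption_password: yabbadabbadoo   # literal test value; a real playbook would use vault

The blivet provider translates this into the four actions listed above (destroy the existing xfs format on /dev/sda, create the LUKS format, open it as /dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce, and create xfs on the mapper device), and the subsequent tasks update /etc/fstab and /etc/crypttab accordingly.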
TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:17:02 +0000 (0:00:00.012) 0:02:00.167 *********** TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:116 Sunday 09 June 2024 04:17:02 +0000 (0:00:00.011) 0:02:00.178 *********** ok: [sut] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/sda", "fs_type": "xfs" }, { "action": "create format", "device": "/dev/sda", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "fs_type": "xfs" } ], "changed": true, "crypts": [ { "backing_device": "/dev/sda", "name": "luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "password": "-", "state": "present" } ], "failed": false, "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "UUID=faa4e300-f20d-4b0f-b1d3-e57a1eb71e00", "state": "absent" }, { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "state": "mounted" } ], "packages": [ "cryptsetup", "xfsprogs" ], "pools": [], "volumes": [ { "_device": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "_raw_device": "/dev/sda", "_raw_kernel_device": "/dev/sda", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 10737418240, "state": "present", "thin": null, "thin_pool_name": null, "thin_pool_size": null, "type": "disk", "vdo_pool_size": null } ] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:125 Sunday 09 June 2024 04:17:02 +0000 (0:00:00.014) 0:02:00.193 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:129 Sunday 09 June 2024 04:17:02 +0000 (0:00:00.014) 0:02:00.207 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [ { 
"_device": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "_raw_device": "/dev/sda", "_raw_kernel_device": "/dev/sda", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 10737418240, "state": "present", "thin": null, "thin_pool_name": null, "thin_pool_size": null, "type": "disk", "vdo_pool_size": null } ] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:145 Sunday 09 June 2024 04:17:02 +0000 (0:00:00.041) 0:02:00.248 *********** changed: [sut] => (item={'src': 'UUID=faa4e300-f20d-4b0f-b1d3-e57a1eb71e00', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'xfs'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "fstype": "xfs", "path": "/opt/test1", "src": "UUID=faa4e300-f20d-4b0f-b1d3-e57a1eb71e00", "state": "absent" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "UUID=faa4e300-f20d-4b0f-b1d3-e57a1eb71e00" } TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:157 Sunday 09 June 2024 04:17:03 +0000 (0:00:00.180) 0:02:00.428 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:162 Sunday 09 June 2024 04:17:03 +0000 (0:00:00.490) 0:02:00.919 *********** changed: [sut] => (item={'src': '/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce" } TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Sunday 09 June 2024 04:17:03 +0000 (0:00:00.195) 0:02:01.114 *********** skipping: [sut] => (item={'src': 
'/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:189 Sunday 09 June 2024 04:17:03 +0000 (0:00:00.017) 0:02:01.132 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:197 Sunday 09 June 2024 04:17:04 +0000 (0:00:00.475) 0:02:01.608 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906599.4082024, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1717906598.2111976, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 448790723, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1717906598.2111976, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3838156660", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:202 Sunday 09 June 2024 04:17:04 +0000 (0:00:00.177) 0:02:01.786 *********** changed: [sut] => (item={'backing_device': '/dev/sda', 'name': 'luks-ff6bfc93-de52-4449-933e-5bd1452b88ce', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sda", "name": "luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "password": "-", "state": "present" } } MSG: line added TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:224 Sunday 09 June 2024 04:17:04 +0000 (0:00:00.182) 0:02:01.968 *********** ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:173 Sunday 09 June 2024 04:17:05 +0000 (0:00:00.606) 0:02:02.574 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:2 Sunday 09 June 2024 04:17:05 +0000 (0:00:00.027) 0:02:02.602 *********** skipping: [sut] => {} TASK [Print out volume information] ******************************************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:7 Sunday 09 June 2024 04:17:05 +0000 (0:00:00.012) 0:02:02.614 *********** ok: [sut] => { "_storage_volumes_list": [ { "_device": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "_raw_device": "/dev/sda", "_raw_kernel_device": "/dev/sda", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 10737418240, "state": "present", "thin": null, "thin_pool_name": null, "thin_pool_size": null, "type": "disk", "vdo_pool_size": null } ] } TASK [Collect info about the volumes.] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:15 Sunday 09 June 2024 04:17:05 +0000 (0:00:00.015) 0:02:02.629 *********** ok: [sut] => { "changed": false, "info": { "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce": { "fstype": "xfs", "label": "", "name": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "size": "10G", "type": "crypt", "uuid": "97dc7ad7-9ce1-494d-8279-5d4c912fc997" }, "/dev/sda": { "fstype": "crypto_LUKS", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "ff6bfc93-de52-4449-933e-5bd1452b88ce" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "xfs", "label": "", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "82a26900-2b36-49b9-bc49-f4d2f0bb14b8" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:20 Sunday 09 June 2024 04:17:05 +0000 (0:00:00.174) 0:02:02.804 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002527", "end": "2024-06-09 04:17:05.605265", "rc": 0, "start": "2024-06-09 04:17:05.602738" } STDOUT: # # /etc/fstab # Created 
by anaconda on Tue May 28 13:15:44 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=82a26900-2b36-49b9-bc49-f4d2f0bb14b8 / xfs defaults 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:25 Sunday 09 June 2024 04:17:05 +0000 (0:00:00.172) 0:02:02.976 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003179", "end": "2024-06-09 04:17:05.784828", "failed_when_result": false, "rc": 0, "start": "2024-06-09 04:17:05.781649" } STDOUT: luks-ff6bfc93-de52-4449-933e-5bd1452b88ce /dev/sda - TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:34 Sunday 09 June 2024 04:17:05 +0000 (0:00:00.179) 0:02:03.156 *********** TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:44 Sunday 09 June 2024 04:17:05 +0000 (0:00:00.011) 0:02:03.168 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml for sut TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:2 Sunday 09 June 2024 04:17:05 +0000 (0:00:00.023) 0:02:03.191 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:21 Sunday 09 June 2024 04:17:05 +0000 (0:00:00.016) 0:02:03.208 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml for sut included: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml for sut TASK [Get expected mount device based on device type] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:7 Sunday 09 June 2024 04:17:05 +0000 (0:00:00.074) 0:02:03.282 *********** ok: [sut] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:16 Sunday 09 June 2024 04:17:05 +0000 (0:00:00.015) 0:02:03.297 *********** ok: [sut] => { "ansible_facts": { "storage_test_mount_device_matches": [ { "block_available": 2574562, "block_size": 4096, "block_total": 2600960, "block_used": 26398, "device": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "fstype": "xfs", "inode_available": 5234685, "inode_total": 5234688, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 10545405952, "size_total": 10653532160, "uuid": "97dc7ad7-9ce1-494d-8279-5d4c912fc997" } ], "storage_test_mount_expected_match_count": "1", "storage_test_mount_point_matches": [ { "block_available": 2574562, "block_size": 4096, "block_total": 2600960, "block_used": 26398, "device": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "fstype": "xfs", "inode_available": 5234685, "inode_total": 5234688, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 10545405952, "size_total": 10653532160, "uuid": "97dc7ad7-9ce1-494d-8279-5d4c912fc997" } ], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:38 Sunday 09 June 2024 04:17:05 +0000 (0:00:00.019) 0:02:03.317 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:51 Sunday 09 June 2024 04:17:05 +0000 (0:00:00.013) 0:02:03.330 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the current mount state by mount point] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:63 Sunday 09 June 2024 04:17:05 +0000 (0:00:00.015) 0:02:03.346 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:71 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.014) 0:02:03.361 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:83 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.012) 0:02:03.374 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:95 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.012) 0:02:03.386 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the mount fs type] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:110 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.012) 0:02:03.399 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:122 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.017) 0:02:03.416 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:128 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.012) 0:02:03.429 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:134 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.012) 0:02:03.441 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:146 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.012) 0:02:03.454 *********** ok: [sut] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:2 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.011) 0:02:03.465 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:40 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.026) 0:02:03.491 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:48 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.017) 0:02:03.509 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:58 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.015) 0:02:03.525 
*********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:71 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.012) 0:02:03.537 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:3 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.011) 0:02:03.549 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:12 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.016) 0:02:03.566 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:3 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.017) 0:02:03.584 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906622.4832854, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1717906622.4832854, "dev": 5, "device_type": 2048, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 446, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/blockdevice", "mode": "0660", "mtime": 1717906622.4832854, "nlink": 1, "path": "/dev/sda", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:9 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.179) 0:02:03.763 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:16 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.016) 0:02:03.779 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:24 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.012) 0:02:03.792 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:30 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.014) 0:02:03.807 *********** ok: [sut] => { "ansible_facts": { "st_volume_type": "disk" }, "changed": false } TASK [Process volume type (get 
RAID value) (2/2)] ****************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:34 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.014) 0:02:03.821 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:39 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.012) 0:02:03.834 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:3 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.014) 0:02:03.849 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906622.7202861, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1717906622.7202861, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 803, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1717906622.7202861, "nlink": 1, "path": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:10 Sunday 09 June 2024 04:17:06 +0000 (0:00:00.181) 0:02:04.030 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:17 Sunday 09 June 2024 04:17:07 +0000 (0:00:00.867) 0:02:04.898 *********** ok: [sut] => { "changed": false, "cmd": [ "cryptsetup", "luksDump", "/dev/sda" ], "delta": "0:00:00.006976", "end": "2024-06-09 04:17:07.708917", "rc": 0, "start": "2024-06-09 04:17:07.701941" } STDOUT: LUKS header information Version: 2 Epoch: 3 Metadata area: 16384 [bytes] Keyslots area: 16744448 [bytes] UUID: ff6bfc93-de52-4449-933e-5bd1452b88ce Label: (no label) Subsystem: (no subsystem) Flags: (no flags) Data segments: 0: crypt offset: 16777216 [bytes] length: (whole device) cipher: aes-xts-plain64 sector: 512 [bytes] Keyslots: 0: luks2 Key: 512 bits Priority: normal Cipher: aes-xts-plain64 Cipher key: 512 bits PBKDF: argon2id Time cost: 4 Memory: 672167 Threads: 2 Salt: af 12 1b 08 34 14 a3 f5 25 ef f5 29 52 c8 7c 0d fa 54 76 ea 69 76 dc 50 f1 41 b8 79 20 52 7f de AF stripes: 4000 AF hash: sha256 Area offset:32768 [bytes] Area length:258048 [bytes] Digest ID: 0 Tokens: Digests: 0: pbkdf2 Hash: sha256 Iterations: 103206 Salt: 10 d4 12 4f 16 c9 cd a1 0e 3a b1 6f 7e b9 78 04 79 d3 13 c3 4d 39 83 40 13 47 e5 45 71 7a df c7 Digest: 5a 73 a9 6a 91 c1 0f 6a 79 9c 3d 4c 5a f8 3c 89 09 6a c5 06 1a fd ac 3a 49 4e d4 9c b1 ed cd c8 TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:23 Sunday 09 
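The LUKS header dump above was produced by running cryptsetup luksDump against the backing disk. As a minimal sketch (not the exact task in test-verify-volume-encryption.yml; the registered variable name is an assumption), the same information could be collected with:

  - name: Collect LUKS info for this volume
    command: cryptsetup luksDump /dev/sda
    register: luks_dump        # illustrative name for the captured output
    changed_when: false        # read-only query, never reports a change
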
June 2024 04:17:07 +0000 (0:00:00.185) 0:02:05.083 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:32 Sunday 09 June 2024 04:17:07 +0000 (0:00:00.017) 0:02:05.100 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:45 Sunday 09 June 2024 04:17:07 +0000 (0:00:00.019) 0:02:05.120 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:51 Sunday 09 June 2024 04:17:07 +0000 (0:00:00.016) 0:02:05.136 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:56 Sunday 09 June 2024 04:17:07 +0000 (0:00:00.015) 0:02:05.152 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:69 Sunday 09 June 2024 04:17:07 +0000 (0:00:00.013) 0:02:05.166 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:81 Sunday 09 June 2024 04:17:07 +0000 (0:00:00.013) 0:02:05.179 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:94 Sunday 09 June 2024 04:17:07 +0000 (0:00:00.012) 0:02:05.192 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [ "luks-ff6bfc93-de52-4449-933e-5bd1452b88ce /dev/sda -" ], "_storage_test_expected_crypttab_entries": "1", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:106 Sunday 09 June 2024 04:17:07 +0000 (0:00:00.015) 0:02:05.208 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:114 Sunday 09 June 2024 04:17:07 +0000 (0:00:00.014) 0:02:05.223 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:122 Sunday 09 June 2024 04:17:07 +0000 (0:00:00.015) 0:02:05.238 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check key file of crypttab entry] **************************************** task path: 
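The crypttab checks above expect exactly one entry, with "-" (no key file) as the key field. Reconstructed from the matched entry in the test facts, the corresponding /etc/crypttab line has the form:

  luks-ff6bfc93-de52-4449-933e-5bd1452b88ce /dev/sda -

that is, mapper name, backing device, and key file, in that order.
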
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:131 Sunday 09 June 2024 04:17:07 +0000 (0:00:00.018) 0:02:05.256 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:140 Sunday 09 June 2024 04:17:07 +0000 (0:00:00.017) 0:02:05.274 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:8 Sunday 09 June 2024 04:17:07 +0000 (0:00:00.012) 0:02:05.287 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:14 Sunday 09 June 2024 04:17:07 +0000 (0:00:00.012) 0:02:05.299 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:21 Sunday 09 June 2024 04:17:07 +0000 (0:00:00.012) 0:02:05.312 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:28 Sunday 09 June 2024 04:17:07 +0000 (0:00:00.012) 0:02:05.324 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:35 Sunday 09 June 2024 04:17:07 +0000 (0:00:00.013) 0:02:05.337 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:45 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.034) 0:02:05.372 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:54 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.013) 0:02:05.385 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:63 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.398 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:72 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.410 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] 
*************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:81 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.423 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:3 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.435 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested size of the volume] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:11 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.014) 0:02:05.450 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:20 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.014) 0:02:05.464 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:28 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.013) 0:02:05.478 *********** ok: [sut] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:32 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.491 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:46 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.013) 0:02:05.504 *********** skipping: [sut] => {} TASK [Show test blockinfo] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:50 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.013) 0:02:05.518 *********** skipping: [sut] => {} TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:54 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.013) 0:02:05.532 *********** skipping: [sut] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:58 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.013) 0:02:05.546 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:68 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.013) 0:02:05.559 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:72 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.572 
*********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:77 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.585 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:83 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.597 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:88 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.013) 0:02:05.610 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:96 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.011) 0:02:05.622 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:104 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.635 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:109 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.647 *********** skipping: [sut] => {} TASK [Show volume thin pool size] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:113 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.660 *********** skipping: [sut] => {} TASK [Show test volume size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:117 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.011) 0:02:05.672 *********** skipping: [sut] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:121 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.014) 0:02:05.686 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:129 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.699 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:138 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.711 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:142 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.724 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:150 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.011) 0:02:05.736 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:156 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.749 *********** ok: [sut] => { "storage_test_actual_size": { "changed": false, "skip_reason": "Conditional result was False", "skipped": true } } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:160 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.015) 0:02:05.764 *********** ok: [sut] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Assert expected size is actual size] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:164 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.777 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:5 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.013) 0:02:05.791 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:13 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.804 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:18 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.816 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:27 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.829 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:35 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.014) 0:02:05.843 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:41 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.856 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] 
******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:47 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.869 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:27 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:05.882 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:54 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.011) 0:02:05.893 *********** ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Test for correct handling of new encrypted volume w/ no key] ************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:180 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.011) 0:02:05.905 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml for sut TASK [Store global variable value copy] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:4 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.028) 0:02:05.933 *********** ok: [sut] => { "ansible_facts": { "storage_pools_global": [], "storage_safe_mode_global": true, "storage_volumes_global": [] }, "changed": false } TASK [Verify role raises correct error] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:10 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.016) 0:02:05.949 *********** TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.017) 0:02:05.967 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.017) 0:02:05.984 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.014) 0:02:05.999 *********** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": 
"item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.033) 0:02:06.032 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set flag to indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:06.045 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.012) 0:02:06.057 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.011) 0:02:06.069 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.011) 0:02:06.080 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:17:08 +0000 (0:00:00.025) 0:02:06.106 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:17:09 +0000 (0:00:00.874) 0:02:06.981 *********** ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo", "type": "partition", "volumes": [ { "encryption": true, "mount_point": "/opt/test1", "name": "test1", "size": "4g", "type": "partition" } ] } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:17:09 +0000 (0:00:00.016) 0:02:06.997 *********** ok: [sut] => { "storage_volumes": [] } TASK [linux-system-roles.storage : Get required packages] 
********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:17:09 +0000 (0:00:00.016) 0:02:07.014 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "cryptsetup" ], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:17:10 +0000 (0:00:00.960) 0:02:07.974 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:17:10 +0000 (0:00:00.022) 0:02:07.997 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 04:17:10 +0000 (0:00:00.011) 0:02:08.009 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:17:10 +0000 (0:00:00.012) 0:02:08.022 *********** TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:17:10 +0000 (0:00:00.011) 0:02:08.033 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup kpartx TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:17:11 +0000 (0:00:00.879) 0:02:08.912 *********** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": 
"disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": 
"dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": 
"mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, 
"rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": 
"systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, 
"systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { 
"name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:17:13 +0000 (0:00:01.475) 0:02:10.387 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:17:13 +0000 (0:00:00.021) 0:02:10.409 *********** TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:17:13 +0000 (0:00:00.012) 0:02:10.421 *********** fatal: [sut]: FAILED! 
=> { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } MSG: encrypted volume 'test1' missing key/password TASK [linux-system-roles.storage : Failed message] ***************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:106 Sunday 09 June 2024 04:17:14 +0000 (0:00:01.091) 0:02:11.512 *********** fatal: [sut]: FAILED! => { "changed": false } MSG: {'msg': "encrypted volume 'test1' missing key/password", 'changed': False, 'actions': [], 'leaves': [], 'mounts': [], 'crypts': [], 'pools': [], 'volumes': [], 'packages': [], 'failed': True, 'invocation': {'module_args': {'pools': [{'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'partition', 'volumes': [{'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'partition', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None}]}], 'volumes': [], 'use_partitions': None, 'disklabel_type': None, 'pool_defaults': {'state': 'present', 'type': 'lvm', 'disks': [], 'volumes': [], 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'shared': False}, 'volume_defaults': {'state': 'present', 'type': 'lvm', 'size': 0, 'disks': [], 'fs_type': 'xfs', 'fs_label': '', 'fs_create_options': '', 'fs_overwrite_existing': True, 'mount_point': '', 'mount_options': 'defaults', 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_stripe_size': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': []}, 'safe_mode': False, 'packages_only': False, 'diskvolume_mkfs_option_map': {}}}, '_ansible_no_log': False} TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:17:14 +0000 (0:00:00.016) 0:02:11.529 *********** TASK [Check that we failed in the role] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:29 Sunday 09 June 2024 04:17:14 +0000 (0:00:00.011) 0:02:11.541 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the blivet output and error message are correct] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:34 Sunday 09 June 2024 04:17:14 +0000 (0:00:00.013) 0:02:11.554 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify correct exception or error message] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:45 Sunday 09 June 2024 04:17:14 +0000 (0:00:00.020) 0:02:11.574 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Create an encrypted partition volume w/ default fs] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:199 Sunday 09 June 2024 04:17:14 +0000 (0:00:00.012) 0:02:11.587 *********** TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Sunday 09 June 2024 04:17:14 +0000 (0:00:00.042) 0:02:11.629 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Sunday 09 June 2024 04:17:14 +0000 (0:00:00.017) 0:02:11.647 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Sunday 09 June 2024 04:17:14 +0000 (0:00:00.014) 0:02:11.661 *********** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:17:14 +0000 (0:00:00.059) 0:02:11.721 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set flag to indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:17:14 +0000 (0:00:00.013) 0:02:11.735 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:17:14 +0000 (0:00:00.012) 0:02:11.747 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Sunday 09 June 2024 04:17:14 +0000 (0:00:00.011) 0:02:11.759 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:17:14 +0000 (0:00:00.011) 0:02:11.770 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:17:14 +0000 (0:00:00.025) 0:02:11.795 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:17:15 +0000 (0:00:00.861) 0:02:12.657 *********** ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo", "type": "partition", "volumes": [ { "encryption": true, "encryption_password": "yabbadabbadoo", "mount_point": "/opt/test1", "name": "test1", "size": "4g", "type": "partition" } ] } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:17:15 +0000 (0:00:00.015) 0:02:12.672 *********** ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:17:15 +0000 (0:00:00.013) 0:02:12.685 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "cryptsetup" ], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:17:16 +0000 (0:00:00.949) 0:02:13.635 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:17:16 +0000 (0:00:00.022) 0:02:13.657 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 04:17:16 +0000 (0:00:00.011) 0:02:13.669 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:17:16 +0000 (0:00:00.012) 0:02:13.682 *********** TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:17:16 +0000 (0:00:00.011) 0:02:13.693 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup kpartx TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:17:17 +0000 (0:00:00.854) 0:02:14.548 *********** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": 
"systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": 
"disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": 
"systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:17:18 +0000 (0:00:01.489) 0:02:16.037 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:17:18 +0000 (0:00:00.021) 0:02:16.058 *********** TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:17:18 +0000 (0:00:00.011) 0:02:16.070 *********** changed: [sut] => { "actions": [ { "action": "destroy format", "device": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "luks" }, { "action": "create format", "device": "/dev/sda", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sda1", "fs_type": null }, { "action": "create format", "device": "/dev/sda1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "fs_type": "xfs" } ], 
"changed": true, "crypts": [ { "backing_device": "/dev/sda", "name": "luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "password": "-", "state": "absent" }, { "backing_device": "/dev/sda1", "name": "luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "password": "-", "state": "present" } ], "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "state": "absent" }, { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "state": "mounted" } ], "packages": [ "xfsprogs", "cryptsetup" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "partition", "volumes": [ { "_device": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "partition", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:98 Sunday 09 June 2024 04:17:29 +0000 (0:00:11.222) 0:02:27.293 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:17:29 +0000 (0:00:00.012) 0:02:27.306 *********** TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:116 Sunday 09 June 2024 04:17:29 +0000 (0:00:00.011) 0:02:27.317 *********** ok: [sut] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "fs_type": "xfs" }, { "action": "destroy device", "device": 
"/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "luks" }, { "action": "create format", "device": "/dev/sda", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sda1", "fs_type": null }, { "action": "create format", "device": "/dev/sda1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "fs_type": "xfs" } ], "changed": true, "crypts": [ { "backing_device": "/dev/sda", "name": "luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "password": "-", "state": "absent" }, { "backing_device": "/dev/sda1", "name": "luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "password": "-", "state": "present" } ], "failed": false, "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "state": "absent" }, { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "state": "mounted" } ], "packages": [ "xfsprogs", "cryptsetup" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "partition", "volumes": [ { "_device": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "partition", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:125 Sunday 09 June 2024 04:17:29 +0000 (0:00:00.016) 0:02:27.333 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, 
"encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "partition", "volumes": [ { "_device": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "partition", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:129 Sunday 09 June 2024 04:17:29 +0000 (0:00:00.015) 0:02:27.349 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:145 Sunday 09 June 2024 04:17:30 +0000 (0:00:00.014) 0:02:27.363 *********** changed: [sut] => (item={'src': '/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'xfs'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "state": "absent" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/luks-ff6bfc93-de52-4449-933e-5bd1452b88ce" } TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:157 Sunday 09 June 2024 04:17:30 +0000 (0:00:00.179) 0:02:27.542 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:162 Sunday 09 June 2024 04:17:30 +0000 (0:00:00.488) 0:02:28.031 *********** changed: [sut] => (item={'src': '/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", 
"mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9" } TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Sunday 09 June 2024 04:17:30 +0000 (0:00:00.191) 0:02:28.222 *********** skipping: [sut] => (item={'src': '/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:189 Sunday 09 June 2024 04:17:30 +0000 (0:00:00.018) 0:02:28.240 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:197 Sunday 09 June 2024 04:17:31 +0000 (0:00:00.488) 0:02:28.729 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906625.7842968, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "591ad6ff735c6c11d3a1008e8d9b9c76d92a2673", "ctime": 1717906624.5922928, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 507510984, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0600", "mtime": 1717906624.5912926, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 53, "uid": 0, "version": "603334294", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:202 Sunday 09 June 2024 04:17:31 +0000 (0:00:00.182) 0:02:28.912 *********** changed: [sut] => (item={'backing_device': '/dev/sda', 'name': 'luks-ff6bfc93-de52-4449-933e-5bd1452b88ce', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sda", "name": "luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed changed: [sut] => (item={'backing_device': '/dev/sda1', 'name': 'luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sda1", "name": 
"luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "password": "-", "state": "present" } } MSG: line added TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:224 Sunday 09 June 2024 04:17:31 +0000 (0:00:00.354) 0:02:29.266 *********** ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:216 Sunday 09 June 2024 04:17:32 +0000 (0:00:00.659) 0:02:29.925 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:2 Sunday 09 June 2024 04:17:32 +0000 (0:00:00.031) 0:02:29.956 *********** ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "partition", "volumes": [ { "_device": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "partition", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:7 Sunday 09 June 2024 04:17:32 +0000 (0:00:00.016) 0:02:29.973 *********** skipping: [sut] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:15 Sunday 09 June 2024 04:17:32 +0000 (0:00:00.012) 0:02:29.986 *********** ok: [sut] => { "changed": false, "info": { "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9": { "fstype": "xfs", "label": "", "name": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "size": "10G", "type": "crypt", "uuid": "08e47da6-84a9-4243-a1fc-593001dbc7a6" }, "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sda1": { "fstype": "crypto_LUKS", "label": "", "name": "/dev/sda1", "size": "10G", "type": "partition", "uuid": "db2c80a8-0ac5-4d72-acc4-d674a0d22fa9" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "xfs", "label": "", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "82a26900-2b36-49b9-bc49-f4d2f0bb14b8" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:20 Sunday 09 June 2024 04:17:32 +0000 (0:00:00.178) 0:02:30.164 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002644", "end": "2024-06-09 04:17:32.967139", "rc": 0, "start": "2024-06-09 04:17:32.964495" } STDOUT: # # /etc/fstab # Created by anaconda on Tue May 28 13:15:44 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=82a26900-2b36-49b9-bc49-f4d2f0bb14b8 / xfs defaults 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9 /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:25 Sunday 09 June 2024 04:17:32 +0000 (0:00:00.173) 0:02:30.338 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003017", "end": "2024-06-09 04:17:33.143182", "failed_when_result": false, "rc": 0, "start": "2024-06-09 04:17:33.140165" } STDOUT: luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9 /dev/sda1 - TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:34 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.176) 0:02:30.515 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml for sut TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:5 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.025) 0:02:30.540 *********** ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:18 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:30.552 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that VG shared value checks out] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:24 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:30.565 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:34 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:30.578 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-volumes.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:2 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.026) 0:02:30.605 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get the canonical device path for 
each member device] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:13 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.013) 0:02:30.618 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set pvs lvm length] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:22 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:30.631 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:27 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:30.644 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:33 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:30.657 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:42 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:30.669 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:48 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:30.682 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:54 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.014) 0:02:30.696 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:59 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:30.709 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:73 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:30.722 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:8 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.023) 0:02:30.745 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:14 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.037) 0:02:30.783 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:21 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.013) 0:02:30.796 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:28 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:30.809 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:35 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:30.821 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:45 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:30.834 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:54 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:30.847 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:64 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.014) 0:02:30.861 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:74 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:30.874 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:85 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:30.886 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:95 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:30.899 *********** ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:76 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.011) 0:02:30.910 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-lvmraid.yml:2 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.025) 0:02:30.935 *********** skipping: [sut] => (item={'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': 
None, 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'partition', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9', '_raw_device': '/dev/sda1', '_mount_id': '/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/sda1'}) => { "ansible_loop_var": "storage_test_lvmraid_volume", "changed": false, "skip_reason": "Conditional result was False", "storage_test_lvmraid_volume": { "_device": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "partition", "vdo_pool_size": null } } TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:79 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.016) 0:02:30.952 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-thin.yml:2 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.025) 0:02:30.977 *********** skipping: [sut] => (item={'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'partition', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 
'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9', '_raw_device': '/dev/sda1', '_mount_id': '/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/sda1'}) => { "ansible_loop_var": "storage_test_thin_volume", "changed": false, "skip_reason": "Conditional result was False", "storage_test_thin_volume": { "_device": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "partition", "vdo_pool_size": null } } TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:82 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.017) 0:02:30.995 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:5 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.025) 0:02:31.020 *********** ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:13 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.015) 0:02:31.035 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:20 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.013) 0:02:31.049 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:27 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.014) 0:02:31.063 *********** ok: [sut] => { "ansible_facts": { 
"_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:85 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.011) 0:02:31.075 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-vdo.yml for sut TASK [Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-vdo.yml:2 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.026) 0:02:31.101 *********** skipping: [sut] => (item={'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'partition', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9', '_raw_device': '/dev/sda1', '_mount_id': '/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/sda1'}) => { "ansible_loop_var": "storage_test_vdo_volume", "changed": false, "skip_reason": "Conditional result was False", "storage_test_vdo_volume": { "_device": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "partition", "vdo_pool_size": null } } TASK [Clean up test variables] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:88 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.017) 0:02:31.118 *********** ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, 
"_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-volumes.yml:3 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:31.131 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml for sut TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:2 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.023) 0:02:31.154 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:21 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.016) 0:02:31.170 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml for sut TASK [Get expected mount device based on device type] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:7 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.057) 0:02:31.228 *********** ok: [sut] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:16 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.015) 0:02:31.243 *********** ok: [sut] => { "ansible_facts": { "storage_test_mount_device_matches": [ { "block_available": 2572529, "block_size": 4096, "block_total": 2598912, "block_used": 26383, "device": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "fstype": "xfs", "inode_available": 5230589, "inode_total": 5230592, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 10537078784, "size_total": 10645143552, "uuid": "08e47da6-84a9-4243-a1fc-593001dbc7a6" } ], "storage_test_mount_expected_match_count": "1", "storage_test_mount_point_matches": [ { "block_available": 2572529, "block_size": 4096, "block_total": 2598912, "block_used": 26383, "device": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "fstype": "xfs", "inode_available": 5230589, "inode_total": 5230592, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 10537078784, "size_total": 10645143552, "uuid": "08e47da6-84a9-4243-a1fc-593001dbc7a6" } ], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task 
path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:38 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.019) 0:02:31.263 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:51 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:31.276 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the current mount state by mount point] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:63 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.015) 0:02:31.291 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:71 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.014) 0:02:31.306 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:83 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.013) 0:02:31.320 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:95 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:31.332 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the mount fs type] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:110 Sunday 09 June 2024 04:17:33 +0000 (0:00:00.012) 0:02:31.345 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:122 Sunday 09 June 2024 04:17:34 +0000 (0:00:00.015) 0:02:31.361 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:128 Sunday 09 June 2024 04:17:34 +0000 (0:00:00.012) 0:02:31.373 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:134 Sunday 09 June 2024 04:17:34 +0000 (0:00:00.012) 0:02:31.386 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:146 Sunday 09 June 2024 04:17:34 +0000 (0:00:00.013) 0:02:31.399 *********** ok: [sut] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, 
"storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:2 Sunday 09 June 2024 04:17:34 +0000 (0:00:00.011) 0:02:31.411 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:40 Sunday 09 June 2024 04:17:34 +0000 (0:00:00.024) 0:02:31.436 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:48 Sunday 09 June 2024 04:17:34 +0000 (0:00:00.015) 0:02:31.451 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:58 Sunday 09 June 2024 04:17:34 +0000 (0:00:00.014) 0:02:31.466 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:71 Sunday 09 June 2024 04:17:34 +0000 (0:00:00.012) 0:02:31.478 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:3 Sunday 09 June 2024 04:17:34 +0000 (0:00:00.012) 0:02:31.491 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:12 Sunday 09 June 2024 04:17:34 +0000 (0:00:00.017) 0:02:31.508 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:3 Sunday 09 June 2024 04:17:34 +0000 (0:00:00.018) 0:02:31.526 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906649.58138, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1717906649.58138, "dev": 5, "device_type": 2049, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 896, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": 
"inode/blockdevice", "mode": "0660", "mtime": 1717906649.58138, "nlink": 1, "path": "/dev/sda1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:9 Sunday 09 June 2024 04:17:34 +0000 (0:00:00.214) 0:02:31.741 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:16 Sunday 09 June 2024 04:17:34 +0000 (0:00:00.017) 0:02:31.759 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:24 Sunday 09 June 2024 04:17:34 +0000 (0:00:00.013) 0:02:31.772 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:30 Sunday 09 June 2024 04:17:34 +0000 (0:00:00.016) 0:02:31.788 *********** ok: [sut] => { "ansible_facts": { "st_volume_type": "partition" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:34 Sunday 09 June 2024 04:17:34 +0000 (0:00:00.013) 0:02:31.802 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:39 Sunday 09 June 2024 04:17:34 +0000 (0:00:00.012) 0:02:31.815 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:3 Sunday 09 June 2024 04:17:34 +0000 (0:00:00.016) 0:02:31.832 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906649.8513808, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1717906649.8513808, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 965, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1717906649.8513808, "nlink": 1, "path": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:10 Sunday 09 June 2024 04:17:34 +0000 (0:00:00.180) 0:02:32.012 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to 
do lsrpackages: cryptsetup

TASK [Collect LUKS info for this volume] ***************************************
task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:17
Sunday 09 June 2024 04:17:35 +0000 (0:00:00.861) 0:02:32.873 ***********
ok: [sut] => { "changed": false, "cmd": [ "cryptsetup", "luksDump", "/dev/sda1" ], "delta": "0:00:00.006090", "end": "2024-06-09 04:17:35.678848", "rc": 0, "start": "2024-06-09 04:17:35.672758" }

STDOUT:

LUKS header information
Version:        2
Epoch:          3
Metadata area:  16384 [bytes]
Keyslots area:  16744448 [bytes]
UUID:           db2c80a8-0ac5-4d72-acc4-d674a0d22fa9
Label:          (no label)
Subsystem:      (no subsystem)
Flags:          (no flags)

Data segments:
  0: crypt
        offset: 16777216 [bytes]
        length: (whole device)
        cipher: aes-xts-plain64
        sector: 512 [bytes]

Keyslots:
  0: luks2
        Key:        512 bits
        Priority:   normal
        Cipher:     aes-xts-plain64
        Cipher key: 512 bits
        PBKDF:      argon2id
        Time cost:  4
        Memory:     672071
        Threads:    2
        Salt:       67 12 a8 7d 4d b7 b6 4f 73 84 dd 71 d1 41 fe d8
                    32 2f fe 53 c1 1f a8 65 6f d6 d3 fc f3 10 a7 67
        AF stripes: 4000
        AF hash:    sha256
        Area offset:32768 [bytes]
        Area length:258048 [bytes]
        Digest ID:  0
Tokens:
Digests:
  0: pbkdf2
        Hash:       sha256
        Iterations: 103044
        Salt:       84 b3 e8 3b 2f 8c cc 6f 4e 51 0a a4 1e bc 66 fd
                    cc da f6 de b2 d6 d7 a3 1c 22 14 96 04 e1 d5 57
        Digest:     65 2c a1 04 97 0c c8 c8 80 6d 70 fa 13 0d f6 af
                    99 55 ca d2 6c 64 87 9a 02 dc 79 cd 29 e8 e7 59

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:23
Sunday 09 June 2024 04:17:35 +0000 (0:00:00.176) 0:02:33.049 ***********
ok: [sut] => { "changed": false }

MSG: All assertions passed

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:32
Sunday 09 June 2024 04:17:35 +0000 (0:00:00.017) 0:02:33.067 ***********
ok: [sut] => { "changed": false }

MSG: All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:45
Sunday 09 June 2024 04:17:35 +0000 (0:00:00.019) 0:02:33.086 ***********
ok: [sut] => { "changed": false }

MSG: All assertions passed

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:51
Sunday 09 June 2024 04:17:35 +0000 (0:00:00.016) 0:02:33.102 ***********
ok: [sut] => { "changed": false }

MSG: All assertions passed

TASK [Check LUKS version] ******************************************************
task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:56
Sunday 09 June 2024 04:17:35 +0000 (0:00:00.013) 0:02:33.119 ***********
skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Check LUKS key size] *****************************************************
task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:69
Sunday 09 June 2024 04:17:35 +0000 (0:00:00.013) 0:02:33.133 ***********
skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Check LUKS cipher] *******************************************************
task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:81
Sunday 09 June 2024 04:17:35 +0000 (0:00:00.014) 0:02:33.147 ***********
skipping: [sut] => { "changed": false,
"skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:94 Sunday 09 June 2024 04:17:35 +0000 (0:00:00.013) 0:02:33.161 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [ "luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9 /dev/sda1 -" ], "_storage_test_expected_crypttab_entries": "1", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:106 Sunday 09 June 2024 04:17:35 +0000 (0:00:00.018) 0:02:33.180 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:114 Sunday 09 June 2024 04:17:35 +0000 (0:00:00.015) 0:02:33.195 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:122 Sunday 09 June 2024 04:17:35 +0000 (0:00:00.016) 0:02:33.211 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:131 Sunday 09 June 2024 04:17:35 +0000 (0:00:00.017) 0:02:33.229 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:140 Sunday 09 June 2024 04:17:35 +0000 (0:00:00.017) 0:02:33.246 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:8 Sunday 09 June 2024 04:17:35 +0000 (0:00:00.013) 0:02:33.259 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:14 Sunday 09 June 2024 04:17:35 +0000 (0:00:00.014) 0:02:33.274 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:21 Sunday 09 June 2024 04:17:35 +0000 (0:00:00.012) 0:02:33.286 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:28 Sunday 09 June 2024 04:17:35 +0000 (0:00:00.012) 0:02:33.299 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:35 Sunday 09 June 2024 04:17:35 +0000 (0:00:00.012) 0:02:33.312 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:45 Sunday 09 June 2024 04:17:35 +0000 (0:00:00.012) 0:02:33.324 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:54 Sunday 09 June 2024 04:17:35 +0000 (0:00:00.012) 0:02:33.337 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:63 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.014) 0:02:33.351 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:72 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.364 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:81 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.377 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:3 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.389 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested size of the volume] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:11 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.013) 0:02:33.403 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:20 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.013) 0:02:33.417 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:28 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.015) 0:02:33.432 *********** ok: [sut] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:32 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.013) 0:02:33.446 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:46 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.014) 0:02:33.460 *********** skipping: [sut] => {} TASK [Show test blockinfo] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:50 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.014) 0:02:33.475 *********** skipping: [sut] => {} TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:54 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.013) 0:02:33.488 *********** skipping: [sut] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:58 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.013) 0:02:33.502 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:68 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.015) 0:02:33.518 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:72 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.531 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:77 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.544 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:83 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.556 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:88 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.569 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:96 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.582 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:104 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.595 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:109 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.607 *********** skipping: [sut] => {} TASK [Show volume thin pool size] 
********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:113 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.620 *********** skipping: [sut] => {} TASK [Show test volume size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:117 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.633 *********** skipping: [sut] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:121 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.645 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:129 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.658 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:138 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.014) 0:02:33.672 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:142 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.013) 0:02:33.685 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:150 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.698 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:156 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.711 *********** ok: [sut] => { "storage_test_actual_size": { "changed": false, "skip_reason": "Conditional result was False", "skipped": true } } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:160 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.013) 0:02:33.725 *********** ok: [sut] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Assert expected size is actual size] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:164 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.013) 0:02:33.739 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:5 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.015) 0:02:33.754 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:13 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.013) 0:02:33.768 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:18 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.780 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:27 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.793 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:35 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.806 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:41 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.818 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:47 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.014) 0:02:33.833 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:27 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.015) 0:02:33.848 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:44 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.014) 0:02:33.863 *********** TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:54 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:33.875 *********** ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Create a file] *********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/create-test-file.yml:12 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.013) 0:02:33.888 *********** changed: [sut] => { "changed": true, "dest": "/opt/test1/quux", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "secontext": "unconfined_u:object_r:unlabeled_t:s0", "size": 0, "state": "file", "uid": 0 } TASK [Test for correct handling of safe_mode] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:222 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.187) 0:02:34.075 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml for sut TASK [Store global variable value copy] 
**************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:4 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.057) 0:02:34.133 *********** ok: [sut] => { "ansible_facts": { "storage_pools_global": [], "storage_safe_mode_global": true, "storage_volumes_global": [] }, "changed": false } TASK [Verify role raises correct error] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:10 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.016) 0:02:34.150 *********** TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.016) 0:02:34.166 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.018) 0:02:34.185 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.015) 0:02:34.200 *********** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.032) 0:02:34.233 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set flag to indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:34.246 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:34.259 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.011) 0:02:34.270 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.012) 0:02:34.283 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:17:36 +0000 (0:00:00.025) 0:02:34.309 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:17:37 +0000 (0:00:00.855) 0:02:35.164 *********** ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo", "type": "partition", "volumes": [ { "encryption": false, "encryption_password": "yabbadabbadoo", "mount_point": "/opt/test1", "name": "test1", "size": "4g", "type": "partition" } ] } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:17:37 +0000 (0:00:00.016) 0:02:35.181 *********** ok: [sut] => { "storage_volumes": [] } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:17:37 +0000 (0:00:00.016) 0:02:35.197 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:17:38 +0000 (0:00:01.096) 0:02:36.294 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:17:38 +0000 (0:00:00.023) 0:02:36.317 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 04:17:38 +0000 (0:00:00.012) 0:02:36.329 *********** 
skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:17:38 +0000 (0:00:00.012) 0:02:36.342 *********** TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:17:39 +0000 (0:00:00.011) 0:02:36.354 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kpartx TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:17:39 +0000 (0:00:00.853) 0:02:37.207 *********** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": 
"dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": 
"grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", 
"state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-cryptsetup@luks\\x2dff6bfc93\\x2dde52\\x2d4449\\x2d933e\\x2d5bd1452b88ce.service": { "name": "systemd-cryptsetup@luks\\x2dff6bfc93\\x2dde52\\x2d4449\\x2d933e\\x2d5bd1452b88ce.service", "source": "systemd", "state": "stopped", "status": "generated" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": 
"systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:17:41 +0000 (0:00:01.468) 0:02:38.675 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [ "systemd-cryptsetup@luks\\x2dff6bfc93\\x2dde52\\x2d4449\\x2d933e\\x2d5bd1452b88ce.service" ] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:17:41 +0000 (0:00:00.021) 0:02:38.696 *********** changed: [sut] => (item=systemd-cryptsetup@luks\x2dff6bfc93\x2dde52\x2d4449\x2d933e\x2d5bd1452b88ce.service) => { "ansible_loop_var": "item", "changed": true, "item": "systemd-cryptsetup@luks\\x2dff6bfc93\\x2dde52\\x2d4449\\x2d933e\\x2d5bd1452b88ce.service", "name": "systemd-cryptsetup@luks\\x2dff6bfc93\\x2dde52\\x2d4449\\x2d933e\\x2d5bd1452b88ce.service", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "dev-sda.device cryptsetup-pre.target systemd-udevd-kernel.socket \"system-systemd\\\\x2dcryptsetup.slice\" systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "umount.target cryptsetup.target \"blockdev@dev-mapper-luks\\\\x2dff6bfc93\\\\x2dde52\\\\x2d4449\\\\x2d933e\\\\x2d5bd1452b88ce.target\"", "BindsTo": "dev-sda.device", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid 
cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "umount.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "no", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Cryptography Setup for luks-ff6bfc93-de52-4449-933e-5bd1452b88ce", "DevicePolicy": "auto", "Documentation": "\"man:crypttab(5)\" \"man:systemd-cryptsetup-generator(8)\" \"man:systemd-cryptsetup@.service(8)\"", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup attach luks-ff6bfc93-de52-4449-933e-5bd1452b88ce /dev/sda - ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup attach luks-ff6bfc93-de52-4449-933e-5bd1452b88ce /dev/sda - ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup detach luks-ff6bfc93-de52-4449-933e-5bd1452b88ce ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup detach luks-ff6bfc93-de52-4449-933e-5bd1452b88ce ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/systemd-cryptsetup@luks\\x2dff6bfc93\\x2dde52\\x2d4449\\x2d933e\\x2d5bd1452b88ce.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-cryptsetup@luks\\x2dff6bfc93\\x2dde52\\x2d4449\\x2d933e\\x2d5bd1452b88ce.service", "IgnoreOnIsolate": "yes", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "shared", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", 
"LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13972", "LimitNPROCSoft": "13972", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13972", "LimitSIGPENDINGSoft": "13972", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"systemd-cryptsetup@luks\\\\x2dff6bfc93\\\\x2dde52\\\\x2d4449\\\\x2d933e\\\\x2d5bd1452b88ce.service\"", "NeedDaemonReload": "yes", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "500", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "RequiredBy": "cryptsetup.target", "Requires": "\"system-systemd\\\\x2dcryptsetup.slice\"", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system-systemd\\x2dcryptsetup.slice", "SourcePath": "/etc/crypttab", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sun 2024-06-09 04:17:31 UTC", "StateChangeTimestampMonotonic": "592026546", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", 
"TasksMax": "22356", "TimeoutAbortUSec": "infinity", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "infinity", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "\"blockdev@dev-mapper-luks\\\\x2dff6bfc93\\\\x2dde52\\\\x2d4449\\\\x2d933e\\\\x2d5bd1452b88ce.target\"", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:17:41 +0000 (0:00:00.514) 0:02:39.211 *********** fatal: [sut]: FAILED! => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } MSG: cannot remove existing formatting on device 'luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9' in safe mode due to encryption removal TASK [linux-system-roles.storage : Failed message] ***************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:106 Sunday 09 June 2024 04:17:42 +0000 (0:00:01.101) 0:02:40.312 *********** fatal: [sut]: FAILED! => { "changed": false } MSG: {'msg': "cannot remove existing formatting on device 'luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9' in safe mode due to encryption removal", 'changed': False, 'actions': [], 'leaves': [], 'mounts': [], 'crypts': [], 'pools': [], 'volumes': [], 'packages': [], 'failed': True, 'invocation': {'module_args': {'pools': [{'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'partition', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': 0, 'encryption_luks_version': 'luks2', 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'partition', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None}]}], 'volumes': [], 'use_partitions': None, 'disklabel_type': None, 'pool_defaults': {'state': 'present', 'type': 'lvm', 'disks': [], 'volumes': [], 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 
'raid_metadata_version': None, 'shared': False}, 'volume_defaults': {'state': 'present', 'type': 'lvm', 'size': 0, 'disks': [], 'fs_type': 'xfs', 'fs_label': '', 'fs_create_options': '', 'fs_overwrite_existing': True, 'mount_point': '', 'mount_options': 'defaults', 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_stripe_size': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': []}, 'safe_mode': True, 'packages_only': False, 'diskvolume_mkfs_option_map': {}}}, '_ansible_no_log': False} TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:17:42 +0000 (0:00:00.016) 0:02:40.329 *********** changed: [sut] => (item=systemd-cryptsetup@luks\x2dff6bfc93\x2dde52\x2d4449\x2d933e\x2d5bd1452b88ce.service) => { "ansible_loop_var": "item", "changed": true, "item": "systemd-cryptsetup@luks\\x2dff6bfc93\\x2dde52\\x2d4449\\x2d933e\\x2d5bd1452b88ce.service", "name": "systemd-cryptsetup@luks\\x2dff6bfc93\\x2dde52\\x2d4449\\x2d933e\\x2d5bd1452b88ce.service", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "no", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "systemd-cryptsetup@luks\\x2dff6bfc93\\x2dde52\\x2d4449\\x2d933e\\x2d5bd1452b88ce.service", "DevicePolicy": "auto", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", 
"FinalKillSignal": "9", "FragmentPath": "/etc/systemd/system/systemd-cryptsetup@luks\\x2dff6bfc93\\x2dde52\\x2d4449\\x2d933e\\x2d5bd1452b88ce.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-cryptsetup@luks\\x2dff6bfc93\\x2dde52\\x2d4449\\x2d933e\\x2d5bd1452b88ce.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "463069184", "LimitMEMLOCKSoft": "463069184", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "1073741816", "LimitNOFILESoft": "1073741816", "LimitNPROC": "13972", "LimitNPROCSoft": "13972", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13972", "LimitSIGPENDINGSoft": "13972", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadError": "org.freedesktop.systemd1.UnitMasked \"Unit systemd-cryptsetup@luks\\x2dff6bfc93\\x2dde52\\x2d4449\\x2d933e\\x2d5bd1452b88ce.service is masked.\"", "LoadState": "masked", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"systemd-cryptsetup@luks\\\\x2dff6bfc93\\\\x2dde52\\\\x2d4449\\\\x2d933e\\\\x2d5bd1452b88ce.service\"", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": 
"no", "RemoveIPC": "no", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "inherit", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22356", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "masked", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [Check that we failed in the role] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:29 Sunday 09 June 2024 04:17:43 +0000 (0:00:00.508) 0:02:40.837 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the blivet output and error message are correct] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:34 Sunday 09 June 2024 04:17:43 +0000 (0:00:00.015) 0:02:40.852 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify correct exception or error message] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:45 Sunday 09 June 2024 04:17:43 +0000 (0:00:00.018) 0:02:40.871 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Stat the file] *********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-data-preservation.yml:11 Sunday 09 June 2024 04:17:43 +0000 (0:00:00.012) 0:02:40.884 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906656.7014046, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1717906656.7014046, "dev": 64768, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0644", "mtime": 1717906656.7014046, "nlink": 1, "path": "/opt/test1/quux", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 0, "uid": 0, "version": "351049962", "wgrp": false, "woth": false, "writeable": true, 
"wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Assert file presence] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-data-preservation.yml:16 Sunday 09 June 2024 04:17:43 +0000 (0:00:00.176) 0:02:41.061 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Remove the encryption layer] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:246 Sunday 09 June 2024 04:17:43 +0000 (0:00:00.015) 0:02:41.076 *********** TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Sunday 09 June 2024 04:17:43 +0000 (0:00:00.079) 0:02:41.155 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Sunday 09 June 2024 04:17:43 +0000 (0:00:00.019) 0:02:41.174 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Sunday 09 June 2024 04:17:43 +0000 (0:00:00.015) 0:02:41.190 *********** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:17:43 +0000 (0:00:00.032) 0:02:41.222 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set flag to indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:17:43 +0000 (0:00:00.013) 0:02:41.235 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:17:43 +0000 (0:00:00.012) 0:02:41.248 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Sunday 09 June 2024 04:17:43 +0000 (0:00:00.012) 0:02:41.260 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:17:43 +0000 (0:00:00.011) 0:02:41.272 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:17:43 +0000 (0:00:00.024) 0:02:41.297 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:17:44 +0000 (0:00:00.858) 0:02:42.155 *********** ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo", "type": "partition", "volumes": [ { "encryption": false, "encryption_password": "yabbadabbadoo", "mount_point": "/opt/test1", "name": "test1", "size": "4g", "type": "partition" } ] } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:17:44 +0000 (0:00:00.015) 0:02:42.171 *********** ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:17:44 +0000 (0:00:00.014) 0:02:42.185 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:17:45 +0000 (0:00:01.096) 0:02:43.282 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:17:45 +0000 (0:00:00.023) 0:02:43.305 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 
04:17:45 +0000 (0:00:00.012) 0:02:43.317 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:17:45 +0000 (0:00:00.013) 0:02:43.330 *********** TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:17:45 +0000 (0:00:00.012) 0:02:43.342 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kpartx TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:17:46 +0000 (0:00:00.859) 0:02:44.202 *********** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": 
"enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": 
"multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", 
"status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service": { "name": "systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service", "source": "systemd", "state": "stopped", "status": "generated" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", 
"state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": 
"systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:17:48 +0000 (0:00:01.482) 0:02:45.684 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [ "systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service" ] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:17:48 +0000 (0:00:00.021) 0:02:45.705 *********** changed: [sut] => (item=systemd-cryptsetup@luks\x2ddb2c80a8\x2d0ac5\x2d4d72\x2dacc4\x2dd674a0d22fa9.service) => { "ansible_loop_var": "item", "changed": true, "item": "systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service", "name": "systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "cryptsetup-pre.target \"system-systemd\\\\x2dcryptsetup.slice\" dev-sda1.device systemd-journald.socket systemd-udevd-kernel.socket", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "cryptsetup.target \"blockdev@dev-mapper-luks\\\\x2ddb2c80a8\\\\x2d0ac5\\\\x2d4d72\\\\x2dacc4\\\\x2dd674a0d22fa9.target\" umount.target", "BindsTo": "dev-sda1.device", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search 
cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "umount.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "no", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Cryptography Setup for luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "DevicePolicy": "auto", "Documentation": "\"man:crypttab(5)\" \"man:systemd-cryptsetup-generator(8)\" \"man:systemd-cryptsetup@.service(8)\"", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup attach luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9 /dev/sda1 - ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup attach luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9 /dev/sda1 - ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup detach luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup detach luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service", "IgnoreOnIsolate": "yes", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "shared", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": 
"infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13972", "LimitNPROCSoft": "13972", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13972", "LimitSIGPENDINGSoft": "13972", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"systemd-cryptsetup@luks\\\\x2ddb2c80a8\\\\x2d0ac5\\\\x2d4d72\\\\x2dacc4\\\\x2dd674a0d22fa9.service\"", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "500", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "RequiredBy": "cryptsetup.target \"dev-mapper-luks\\\\x2ddb2c80a8\\\\x2d0ac5\\\\x2d4d72\\\\x2dacc4\\\\x2dd674a0d22fa9.device\"", "Requires": "\"system-systemd\\\\x2dcryptsetup.slice\"", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system-systemd\\x2dcryptsetup.slice", "SourcePath": "/etc/crypttab", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sun 2024-06-09 04:17:43 UTC", "StateChangeTimestampMonotonic": "604124159", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": 
"2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22356", "TimeoutAbortUSec": "infinity", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "infinity", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "\"blockdev@dev-mapper-luks\\\\x2ddb2c80a8\\\\x2d0ac5\\\\x2d4d72\\\\x2dacc4\\\\x2dd674a0d22fa9.target\"", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:17:48 +0000 (0:00:00.509) 0:02:46.215 *********** changed: [sut] => { "actions": [ { "action": "destroy format", "device": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda1", "fs_type": "luks" }, { "action": "create format", "device": "/dev/sda1", "fs_type": "xfs" } ], "changed": true, "crypts": [ { "backing_device": "/dev/sda1", "name": "luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "password": "-", "state": "absent" } ], "leaves": [ "/dev/sda1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "state": "absent" }, { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=dfdb6909-6730-45cd-8755-b0c1764822fa", "state": "mounted" } ], "packages": [ "xfsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "partition", "volumes": [ { "_device": "/dev/sda1", "_kernel_device": "/dev/sda1", "_mount_id": "UUID=dfdb6909-6730-45cd-8755-b0c1764822fa", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, 
"thin_pool_name": null, "thin_pool_size": null, "type": "partition", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:98 Sunday 09 June 2024 04:17:50 +0000 (0:00:01.688) 0:02:47.903 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:17:50 +0000 (0:00:00.012) 0:02:47.916 *********** changed: [sut] => (item=systemd-cryptsetup@luks\x2ddb2c80a8\x2d0ac5\x2d4d72\x2dacc4\x2dd674a0d22fa9.service) => { "ansible_loop_var": "item", "changed": true, "item": "systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service", "name": "systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "no", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service", "DevicePolicy": "auto", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/etc/systemd/system/systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", 
"IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "463069184", "LimitMEMLOCKSoft": "463069184", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "1073741816", "LimitNOFILESoft": "1073741816", "LimitNPROC": "13972", "LimitNPROCSoft": "13972", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13972", "LimitSIGPENDINGSoft": "13972", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadError": "org.freedesktop.systemd1.UnitMasked \"Unit systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service is masked.\"", "LoadState": "masked", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"systemd-cryptsetup@luks\\\\x2ddb2c80a8\\\\x2d0ac5\\\\x2d4d72\\\\x2dacc4\\\\x2dd674a0d22fa9.service\"", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "RequiredBy": "cryptsetup.target \"dev-mapper-luks\\\\x2ddb2c80a8\\\\x2d0ac5\\\\x2d4d72\\\\x2dacc4\\\\x2dd674a0d22fa9.device\"", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", 
"SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "inherit", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sun 2024-06-09 04:17:43 UTC", "StateChangeTimestampMonotonic": "604124159", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22356", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "masked", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:116 Sunday 09 June 2024 04:17:51 +0000 (0:00:00.554) 0:02:48.470 *********** ok: [sut] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda1", "fs_type": "luks" }, { "action": "create format", "device": "/dev/sda1", "fs_type": "xfs" } ], "changed": true, "crypts": [ { "backing_device": "/dev/sda1", "name": "luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "password": "-", "state": "absent" } ], "failed": false, "leaves": [ "/dev/sda1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "state": "absent" }, { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=dfdb6909-6730-45cd-8755-b0c1764822fa", "state": "mounted" } ], "packages": [ "xfsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "partition", "volumes": [ { "_device": "/dev/sda1", "_kernel_device": "/dev/sda1", "_mount_id": "UUID=dfdb6909-6730-45cd-8755-b0c1764822fa", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, 
"encryption_luks_version": "luks2", "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "partition", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:125 Sunday 09 June 2024 04:17:51 +0000 (0:00:00.017) 0:02:48.487 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "partition", "volumes": [ { "_device": "/dev/sda1", "_kernel_device": "/dev/sda1", "_mount_id": "UUID=dfdb6909-6730-45cd-8755-b0c1764822fa", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "partition", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:129 Sunday 09 June 2024 04:17:51 +0000 (0:00:00.015) 0:02:48.503 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:145 Sunday 09 June 2024 04:17:51 +0000 (0:00:00.015) 0:02:48.519 *********** changed: [sut] => (item={'src': '/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'xfs'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "fstype": "xfs", "path": "/opt/test1", "src": 
"/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "state": "absent" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9" } TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:157 Sunday 09 June 2024 04:17:51 +0000 (0:00:00.179) 0:02:48.698 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:162 Sunday 09 June 2024 04:17:51 +0000 (0:00:00.501) 0:02:49.200 *********** changed: [sut] => (item={'src': 'UUID=dfdb6909-6730-45cd-8755-b0c1764822fa', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=dfdb6909-6730-45cd-8755-b0c1764822fa", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "UUID=dfdb6909-6730-45cd-8755-b0c1764822fa" } TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Sunday 09 June 2024 04:17:52 +0000 (0:00:00.193) 0:02:49.393 *********** skipping: [sut] => (item={'src': 'UUID=dfdb6909-6730-45cd-8755-b0c1764822fa', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=dfdb6909-6730-45cd-8755-b0c1764822fa", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:189 Sunday 09 June 2024 04:17:52 +0000 (0:00:00.017) 0:02:49.411 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:197 Sunday 09 June 2024 04:17:52 +0000 (0:00:00.482) 0:02:49.894 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906653.1413922, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "18dc8278d57a2ce123626f62138d9f4200ea2b34", "ctime": 1717906651.8893878, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 39846212, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0600", "mtime": 1717906651.888388, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, 
"size": 54, "uid": 0, "version": "3477675779", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:202 Sunday 09 June 2024 04:17:52 +0000 (0:00:00.179) 0:02:50.073 *********** changed: [sut] => (item={'backing_device': '/dev/sda1', 'name': 'luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sda1", "name": "luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:224 Sunday 09 June 2024 04:17:52 +0000 (0:00:00.181) 0:02:50.255 *********** ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:263 Sunday 09 June 2024 04:17:53 +0000 (0:00:00.640) 0:02:50.895 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:2 Sunday 09 June 2024 04:17:53 +0000 (0:00:00.033) 0:02:50.928 *********** ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "partition", "volumes": [ { "_device": "/dev/sda1", "_kernel_device": "/dev/sda1", "_mount_id": "UUID=dfdb6909-6730-45cd-8755-b0c1764822fa", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "partition", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:7 Sunday 09 June 2024 04:17:53 +0000 (0:00:00.017) 0:02:50.946 *********** skipping: [sut] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:15 Sunday 09 June 2024 04:17:53 +0000 (0:00:00.012) 0:02:50.959 *********** ok: [sut] => { "changed": false, "info": { "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sda1": { "fstype": "xfs", "label": "", "name": "/dev/sda1", "size": "10G", "type": "partition", "uuid": "dfdb6909-6730-45cd-8755-b0c1764822fa" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "xfs", "label": "", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "82a26900-2b36-49b9-bc49-f4d2f0bb14b8" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:20 Sunday 09 June 2024 04:17:53 +0000 (0:00:00.175) 0:02:51.135 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002449", "end": "2024-06-09 04:17:53.931471", "rc": 0, "start": "2024-06-09 04:17:53.929022" } STDOUT: # # /etc/fstab # Created by anaconda on Tue May 28 13:15:44 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=82a26900-2b36-49b9-bc49-f4d2f0bb14b8 / xfs defaults 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 UUID=dfdb6909-6730-45cd-8755-b0c1764822fa /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:25 Sunday 09 June 2024 04:17:53 +0000 (0:00:00.168) 0:02:51.303 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002675", "end": "2024-06-09 04:17:54.103579", "failed_when_result": false, "rc": 0, "start": "2024-06-09 04:17:54.100904" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:34 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.171) 0:02:51.475 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml for sut TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:5 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.026) 0:02:51.501 *********** ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:18 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.012) 0:02:51.513 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that VG shared value checks out] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:24 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.015) 0:02:51.529 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:34 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.012) 0:02:51.542 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-volumes.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:2 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.026) 0:02:51.569 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get the canonical device path for each member device] ******************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:13 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.013) 0:02:51.582 *********** TASK [Set pvs lvm length] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:22 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.011) 0:02:51.594 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:27 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.012) 0:02:51.606 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:33 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.012) 0:02:51.619 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:42 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.012) 0:02:51.632 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:48 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.017) 0:02:51.649 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:54 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.041) 0:02:51.691 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:59 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.013) 0:02:51.704 *********** TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:73 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.011) 0:02:51.716 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:8 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.022) 0:02:51.738 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:14 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.016) 0:02:51.755 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:21 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.013) 0:02:51.769 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md 
version regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:28 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.013) 0:02:51.782 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:35 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.017) 0:02:51.800 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:45 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.016) 0:02:51.816 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:54 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.015) 0:02:51.832 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:64 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.020) 0:02:51.852 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:74 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.014) 0:02:51.866 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:85 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.015) 0:02:51.882 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:95 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.014) 0:02:51.897 *********** ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:76 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.013) 0:02:51.910 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-lvmraid.yml:2 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.034) 0:02:51.944 *********** skipping: [sut] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': 0, 'encryption_luks_version': 'luks2', 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 
'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'partition', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/sda1', '_raw_device': '/dev/sda1', '_mount_id': 'UUID=dfdb6909-6730-45cd-8755-b0c1764822fa', '_kernel_device': '/dev/sda1', '_raw_kernel_device': '/dev/sda1'}) => { "ansible_loop_var": "storage_test_lvmraid_volume", "changed": false, "skip_reason": "Conditional result was False", "storage_test_lvmraid_volume": { "_device": "/dev/sda1", "_kernel_device": "/dev/sda1", "_mount_id": "UUID=dfdb6909-6730-45cd-8755-b0c1764822fa", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "partition", "vdo_pool_size": null } } TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:79 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.020) 0:02:51.965 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-thin.yml:2 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.035) 0:02:52.000 *********** skipping: [sut] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': 0, 'encryption_luks_version': 'luks2', 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'partition', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 
'raid_metadata_version': None, '_device': '/dev/sda1', '_raw_device': '/dev/sda1', '_mount_id': 'UUID=dfdb6909-6730-45cd-8755-b0c1764822fa', '_kernel_device': '/dev/sda1', '_raw_kernel_device': '/dev/sda1'}) => { "ansible_loop_var": "storage_test_thin_volume", "changed": false, "skip_reason": "Conditional result was False", "storage_test_thin_volume": { "_device": "/dev/sda1", "_kernel_device": "/dev/sda1", "_mount_id": "UUID=dfdb6909-6730-45cd-8755-b0c1764822fa", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "partition", "vdo_pool_size": null } } TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:82 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.017) 0:02:52.018 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:5 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.026) 0:02:52.044 *********** ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:13 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.016) 0:02:52.061 *********** TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:20 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.011) 0:02:52.073 *********** TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:27 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.011) 0:02:52.084 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:85 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.013) 0:02:52.098 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-vdo.yml for sut TASK [Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-vdo.yml:2 Sunday 09 June 2024 
04:17:54 +0000 (0:00:00.027) 0:02:52.126 *********** skipping: [sut] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': 0, 'encryption_luks_version': 'luks2', 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'partition', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/sda1', '_raw_device': '/dev/sda1', '_mount_id': 'UUID=dfdb6909-6730-45cd-8755-b0c1764822fa', '_kernel_device': '/dev/sda1', '_raw_kernel_device': '/dev/sda1'}) => { "ansible_loop_var": "storage_test_vdo_volume", "changed": false, "skip_reason": "Conditional result was False", "storage_test_vdo_volume": { "_device": "/dev/sda1", "_kernel_device": "/dev/sda1", "_mount_id": "UUID=dfdb6909-6730-45cd-8755-b0c1764822fa", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "partition", "vdo_pool_size": null } } TASK [Clean up test variables] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:88 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.018) 0:02:52.144 *********** ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-volumes.yml:3 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.011) 0:02:52.156 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml for sut TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:2 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.028) 0:02:52.185 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": true, 
"_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:21 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.016) 0:02:52.201 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml for sut TASK [Get expected mount device based on device type] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:7 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.060) 0:02:52.262 *********** ok: [sut] => { "ansible_facts": { "storage_test_device_path": "/dev/sda1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:16 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.016) 0:02:52.279 *********** ok: [sut] => { "ansible_facts": { "storage_test_mount_device_matches": [ { "block_available": 2576597, "block_size": 4096, "block_total": 2603008, "block_used": 26411, "device": "/dev/sda1", "fstype": "xfs", "inode_available": 5238781, "inode_total": 5238784, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 10553741312, "size_total": 10661920768, "uuid": "dfdb6909-6730-45cd-8755-b0c1764822fa" } ], "storage_test_mount_expected_match_count": "1", "storage_test_mount_point_matches": [ { "block_available": 2576597, "block_size": 4096, "block_total": 2603008, "block_used": 26411, "device": "/dev/sda1", "fstype": "xfs", "inode_available": 5238781, "inode_total": 5238784, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 10553741312, "size_total": 10661920768, "uuid": "dfdb6909-6730-45cd-8755-b0c1764822fa" } ], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:38 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.020) 0:02:52.299 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:51 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.012) 0:02:52.312 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the current mount state by mount point] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:63 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.017) 0:02:52.329 *********** ok: [sut] => { "changed": false } MSG: All assertions passed 
TASK [Verify mount directory user] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:71 Sunday 09 June 2024 04:17:54 +0000 (0:00:00.014) 0:02:52.344 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:83 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.012) 0:02:52.357 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:95 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.014) 0:02:52.372 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the mount fs type] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:110 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.013) 0:02:52.385 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:122 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.016) 0:02:52.401 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:128 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.012) 0:02:52.414 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:134 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.012) 0:02:52.427 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:146 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.012) 0:02:52.439 *********** ok: [sut] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:2 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.012) 0:02:52.452 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "UUID=dfdb6909-6730-45cd-8755-b0c1764822fa " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:40 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.028) 0:02:52.481 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:48 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.017) 0:02:52.499 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:58 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.017) 0:02:52.517 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:71 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.012) 0:02:52.529 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:3 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.012) 0:02:52.542 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:12 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.050) 0:02:52.593 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:3 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.020) 0:02:52.614 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906670.4764526, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1717906670.4764526, "dev": 5, "device_type": 2049, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 896, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/blockdevice", "mode": "0660", "mtime": 1717906670.4764526, "nlink": 1, "path": "/dev/sda1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:9 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.181) 0:02:52.795 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:16 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.018) 0:02:52.814 *********** skipping: 
[sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:24 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.014) 0:02:52.829 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:30 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.016) 0:02:52.846 *********** ok: [sut] => { "ansible_facts": { "st_volume_type": "partition" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:34 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.016) 0:02:52.862 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:39 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.013) 0:02:52.875 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:3 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.017) 0:02:52.893 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:10 Sunday 09 June 2024 04:17:55 +0000 (0:00:00.015) 0:02:52.908 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:17 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.863) 0:02:53.771 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:23 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.014) 0:02:53.786 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:32 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.013) 0:02:53.799 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:45 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.019) 0:02:53.818 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:51 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.016) 0:02:53.835 *********** skipping: [sut] => { "changed": false, "skip_reason": 
"Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:56 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.014) 0:02:53.849 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:69 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.014) 0:02:53.864 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:81 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.013) 0:02:53.877 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:94 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.013) 0:02:53.891 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:106 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.016) 0:02:53.907 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:114 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.022) 0:02:53.929 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:122 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.013) 0:02:53.942 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:131 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.012) 0:02:53.955 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:140 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.014) 0:02:53.970 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:8 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.013) 0:02:53.983 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ 
task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:14 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.013) 0:02:53.997 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:21 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.015) 0:02:54.012 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:28 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.013) 0:02:54.026 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:35 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.020) 0:02:54.046 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:45 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.012) 0:02:54.059 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:54 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.013) 0:02:54.073 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:63 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.013) 0:02:54.086 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:72 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.014) 0:02:54.101 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:81 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.013) 0:02:54.114 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:3 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.013) 0:02:54.128 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested size of the volume] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:11 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.014) 0:02:54.142 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:20 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.020) 0:02:54.163 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:28 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.016) 0:02:54.179 *********** ok: [sut] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:32 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.015) 0:02:54.194 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:46 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.013) 0:02:54.208 *********** skipping: [sut] => {} TASK [Show test blockinfo] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:50 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.013) 0:02:54.222 *********** skipping: [sut] => {} TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:54 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.013) 0:02:54.236 *********** skipping: [sut] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:58 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.012) 0:02:54.249 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:68 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.012) 0:02:54.261 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:72 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.012) 0:02:54.274 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:77 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.012) 0:02:54.287 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:83 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.011) 0:02:54.299 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:88 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.011) 0:02:54.310 *********** skipping: [sut] => { "changed": false, "skip_reason": 
"Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:96 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.011) 0:02:54.322 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:104 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.011) 0:02:54.333 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:109 Sunday 09 June 2024 04:17:56 +0000 (0:00:00.012) 0:02:54.346 *********** skipping: [sut] => {} TASK [Show volume thin pool size] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:113 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.012) 0:02:54.358 *********** skipping: [sut] => {} TASK [Show test volume size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:117 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.011) 0:02:54.370 *********** skipping: [sut] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:121 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.011) 0:02:54.382 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:129 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.011) 0:02:54.393 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:138 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.012) 0:02:54.406 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:142 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.012) 0:02:54.418 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:150 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.012) 0:02:54.431 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:156 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.012) 0:02:54.444 *********** ok: [sut] => { "storage_test_actual_size": { "changed": false, "skip_reason": "Conditional result was False", "skipped": true } } TASK [Show expected size] ****************************************************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:160 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.014) 0:02:54.458 *********** ok: [sut] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Assert expected size is actual size] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:164 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.013) 0:02:54.472 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:5 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.013) 0:02:54.485 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:13 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.013) 0:02:54.499 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:18 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.012) 0:02:54.512 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:27 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.012) 0:02:54.525 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:35 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.012) 0:02:54.537 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:41 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.012) 0:02:54.550 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:47 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.012) 0:02:54.562 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:27 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.013) 0:02:54.576 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:44 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.011) 0:02:54.588 *********** TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:54 Sunday 09 June 2024 
04:17:57 +0000 (0:00:00.011) 0:02:54.599 *********** ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Create a file] *********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/create-test-file.yml:12 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.011) 0:02:54.611 *********** changed: [sut] => { "changed": true, "dest": "/opt/test1/quux", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "secontext": "unconfined_u:object_r:unlabeled_t:s0", "size": 0, "state": "file", "uid": 0 } TASK [Test for correct handling of safe_mode] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:269 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.180) 0:02:54.791 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml for sut TASK [Store global variable value copy] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:4 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.063) 0:02:54.855 *********** ok: [sut] => { "ansible_facts": { "storage_pools_global": [], "storage_safe_mode_global": true, "storage_volumes_global": [] }, "changed": false } TASK [Verify role raises correct error] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:10 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.016) 0:02:54.872 *********** TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.016) 0:02:54.888 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.017) 0:02:54.906 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.014) 0:02:54.921 *********** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ 
"/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.032) 0:02:54.954 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set flag to indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.012) 0:02:54.967 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.012) 0:02:54.979 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.011) 0:02:54.991 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.012) 0:02:55.003 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:17:57 +0000 (0:00:00.025) 0:02:55.028 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:17:58 +0000 (0:00:00.869) 0:02:55.898 *********** ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo", "type": "partition", "volumes": [ { "encryption": true, "encryption_password": "yabbadabbadoo", "mount_point": "/opt/test1", "name": "test1", "size": "4g", "type": "partition" } ] } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:17:58 +0000 (0:00:00.016) 0:02:55.914 *********** ok: [sut] => { "storage_volumes": [] } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:17:58 +0000 (0:00:00.016) 0:02:55.931 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], 
"packages": [ "cryptsetup" ], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:17:59 +0000 (0:00:01.015) 0:02:56.946 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:17:59 +0000 (0:00:00.022) 0:02:56.969 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 04:17:59 +0000 (0:00:00.012) 0:02:56.982 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:17:59 +0000 (0:00:00.015) 0:02:56.997 *********** TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:17:59 +0000 (0:00:00.013) 0:02:57.010 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup kpartx TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:18:00 +0000 (0:00:00.876) 0:02:57.887 *********** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": 
"cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": 
"dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": 
"systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": 
"inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service": { "name": "systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service", "source": "systemd", "state": "stopped", "status": "generated" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": 
"systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:18:02 +0000 (0:00:01.482) 0:02:59.369 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [ "systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service" ] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:18:02 +0000 (0:00:00.022) 0:02:59.392 *********** changed: [sut] => (item=systemd-cryptsetup@luks\x2ddb2c80a8\x2d0ac5\x2d4d72\x2dacc4\x2dd674a0d22fa9.service) => { "ansible_loop_var": "item", "changed": true, "item": "systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service", "name": "systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": 
"systemd-udevd-kernel.socket dev-sda1.device \"system-systemd\\\\x2dcryptsetup.slice\" cryptsetup-pre.target systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "umount.target cryptsetup.target \"blockdev@dev-mapper-luks\\\\x2ddb2c80a8\\\\x2d0ac5\\\\x2d4d72\\\\x2dacc4\\\\x2dd674a0d22fa9.target\"", "BindsTo": "dev-sda1.device", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "umount.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "no", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Cryptography Setup for luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9", "DevicePolicy": "auto", "Documentation": "\"man:crypttab(5)\" \"man:systemd-cryptsetup-generator(8)\" \"man:systemd-cryptsetup@.service(8)\"", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup attach luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9 /dev/sda1 - ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup attach luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9 /dev/sda1 - ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup detach luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup detach luks-db2c80a8-0ac5-4d72-acc4-d674a0d22fa9 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": 
"18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service", "IgnoreOnIsolate": "yes", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "shared", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13972", "LimitNPROCSoft": "13972", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13972", "LimitSIGPENDINGSoft": "13972", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"systemd-cryptsetup@luks\\\\x2ddb2c80a8\\\\x2d0ac5\\\\x2d4d72\\\\x2dacc4\\\\x2dd674a0d22fa9.service\"", "NeedDaemonReload": "yes", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "500", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "RequiredBy": "cryptsetup.target", "Requires": "\"system-systemd\\\\x2dcryptsetup.slice\"", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": 
"infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system-systemd\\x2dcryptsetup.slice", "SourcePath": "/etc/crypttab", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sun 2024-06-09 04:17:43 UTC", "StateChangeTimestampMonotonic": "604124159", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22356", "TimeoutAbortUSec": "infinity", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "infinity", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "\"blockdev@dev-mapper-luks\\\\x2ddb2c80a8\\\\x2d0ac5\\\\x2d4d72\\\\x2dacc4\\\\x2dd674a0d22fa9.target\"", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:18:02 +0000 (0:00:00.511) 0:02:59.903 *********** fatal: [sut]: FAILED! => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } MSG: cannot remove existing formatting on device 'sda1' in safe mode due to adding encryption TASK [linux-system-roles.storage : Failed message] ***************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:106 Sunday 09 June 2024 04:18:03 +0000 (0:00:01.017) 0:03:00.921 *********** fatal: [sut]: FAILED! 
=> { "changed": false } MSG: {'msg': "cannot remove existing formatting on device 'sda1' in safe mode due to adding encryption", 'changed': False, 'actions': [], 'leaves': [], 'mounts': [], 'crypts': [], 'pools': [], 'volumes': [], 'packages': [], 'failed': True, 'invocation': {'module_args': {'pools': [{'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'partition', 'volumes': [{'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'partition', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None}]}], 'volumes': [], 'use_partitions': None, 'disklabel_type': None, 'pool_defaults': {'state': 'present', 'type': 'lvm', 'disks': [], 'volumes': [], 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'shared': False}, 'volume_defaults': {'state': 'present', 'type': 'lvm', 'size': 0, 'disks': [], 'fs_type': 'xfs', 'fs_label': '', 'fs_create_options': '', 'fs_overwrite_existing': True, 'mount_point': '', 'mount_options': 'defaults', 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_stripe_size': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': []}, 'safe_mode': True, 'packages_only': False, 'diskvolume_mkfs_option_map': {}}}, '_ansible_no_log': False} TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:18:03 +0000 (0:00:00.017) 0:03:00.938 *********** changed: [sut] => (item=systemd-cryptsetup@luks\x2ddb2c80a8\x2d0ac5\x2d4d72\x2dacc4\x2dd674a0d22fa9.service) => { "ansible_loop_var": "item", "changed": true, "item": "systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service", "name": 
"systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "no", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service", "DevicePolicy": "auto", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/etc/systemd/system/systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "463069184", "LimitMEMLOCKSoft": "463069184", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": 
"0", "LimitNICESoft": "0", "LimitNOFILE": "1073741816", "LimitNOFILESoft": "1073741816", "LimitNPROC": "13972", "LimitNPROCSoft": "13972", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13972", "LimitSIGPENDINGSoft": "13972", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadError": "org.freedesktop.systemd1.UnitMasked \"Unit systemd-cryptsetup@luks\\x2ddb2c80a8\\x2d0ac5\\x2d4d72\\x2dacc4\\x2dd674a0d22fa9.service is masked.\"", "LoadState": "masked", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"systemd-cryptsetup@luks\\\\x2ddb2c80a8\\\\x2d0ac5\\\\x2d4d72\\\\x2dacc4\\\\x2dd674a0d22fa9.service\"", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "inherit", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22356", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": 
"disabled", "UnitFileState": "masked", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [Check that we failed in the role] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:29 Sunday 09 June 2024 04:18:04 +0000 (0:00:00.510) 0:03:01.449 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the blivet output and error message are correct] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:34 Sunday 09 June 2024 04:18:04 +0000 (0:00:00.014) 0:03:01.464 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify correct exception or error message] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:45 Sunday 09 June 2024 04:18:04 +0000 (0:00:00.018) 0:03:01.482 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Stat the file] *********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-data-preservation.yml:11 Sunday 09 June 2024 04:18:04 +0000 (0:00:00.012) 0:03:01.495 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906677.4224756, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1717906677.4224756, "dev": 2049, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0644", "mtime": 1717906677.4224756, "nlink": 1, "path": "/opt/test1/quux", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 0, "uid": 0, "version": "954053751", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Assert file presence] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-data-preservation.yml:16 Sunday 09 June 2024 04:18:04 +0000 (0:00:00.177) 0:03:01.672 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Create a key file] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:295 Sunday 09 June 2024 04:18:04 +0000 (0:00:00.043) 0:03:01.716 *********** ok: [sut] => { "changed": false, "gid": 0, "group": "root", "mode": "0600", "owner": "root", "path": "/tmp/storage_testgvddjwumlukskey", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 0, "state": "file", "uid": 0 } TASK [Write the key into the key file] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:302 Sunday 09 June 2024 04:18:04 +0000 (0:00:00.218) 0:03:01.935 *********** ok: [sut] => { "changed": false, "checksum": "7a4dff3752e2baf5617c57eaac048e2b95e8af91", "dest": "/tmp/storage_testgvddjwumlukskey", "gid": 0, "group": "root", "md5sum": "4ac07b967150835c00d0865161e48744", "mode": "0600", "owner": "root", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 32, "src": "/root/.ansible/tmp/ansible-tmp-1717906684.6193368-121515-237749666688424/source", "state": "file", "uid": 0 } TASK [Add encryption to the volume] 
******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:309 Sunday 09 June 2024 04:18:05 +0000 (0:00:00.530) 0:03:02.465 *********** TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Sunday 09 June 2024 04:18:05 +0000 (0:00:00.018) 0:03:02.484 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Sunday 09 June 2024 04:18:05 +0000 (0:00:00.018) 0:03:02.502 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Sunday 09 June 2024 04:18:05 +0000 (0:00:00.015) 0:03:02.518 *********** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:18:05 +0000 (0:00:00.032) 0:03:02.550 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set flag to indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:18:05 +0000 (0:00:00.012) 0:03:02.563 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:18:05 +0000 (0:00:00.013) 0:03:02.577 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 
Sunday 09 June 2024 04:18:05 +0000 (0:00:00.011) 0:03:02.588 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:18:05 +0000 (0:00:00.011) 0:03:02.600 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:18:05 +0000 (0:00:00.024) 0:03:02.624 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:18:06 +0000 (0:00:00.879) 0:03:03.504 *********** ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo", "type": "partition", "volumes": [ { "encryption": true, "encryption_key": "/tmp/storage_testgvddjwumlukskey", "mount_point": "/opt/test1", "name": "test1", "size": "4g", "type": "partition" } ] } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:18:06 +0000 (0:00:00.016) 0:03:03.520 *********** ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:18:06 +0000 (0:00:00.013) 0:03:03.534 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "cryptsetup" ], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:18:07 +0000 (0:00:00.998) 0:03:04.532 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:18:07 +0000 (0:00:00.022) 0:03:04.555 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 04:18:07 +0000 (0:00:00.012) 0:03:04.567 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:18:07 +0000 (0:00:00.012) 0:03:04.580 *********** TASK 
[linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:18:07 +0000 (0:00:00.011) 0:03:04.592 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup kpartx TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:18:08 +0000 (0:00:00.863) 0:03:05.455 *********** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", 
"state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", 
"status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": 
"systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": 
"targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:18:09 +0000 (0:00:01.455) 0:03:06.910 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:18:09 +0000 (0:00:00.021) 0:03:06.931 *********** TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:18:09 +0000 (0:00:00.012) 0:03:06.944 *********** changed: [sut] => { "actions": [ { "action": "destroy format", "device": "/dev/sda1", "fs_type": "xfs" }, { "action": "create format", "device": "/dev/sda1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "fs_type": "xfs" } ], "changed": true, "crypts": [ { "backing_device": "/dev/sda1", "name": "luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "state": "present" } ], "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "UUID=dfdb6909-6730-45cd-8755-b0c1764822fa", "state": "absent" }, { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "state": "mounted" } ], "packages": [ "xfsprogs", "cryptsetup" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "partition", "volumes": [ { "_device": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": 
true, "encryption_cipher": null, "encryption_key": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "partition", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:98 Sunday 09 June 2024 04:18:20 +0000 (0:00:10.602) 0:03:17.547 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:18:20 +0000 (0:00:00.012) 0:03:17.560 *********** TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:116 Sunday 09 June 2024 04:18:20 +0000 (0:00:00.011) 0:03:17.571 *********** ok: [sut] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/sda1", "fs_type": "xfs" }, { "action": "create format", "device": "/dev/sda1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "fs_type": "xfs" } ], "changed": true, "crypts": [ { "backing_device": "/dev/sda1", "name": "luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "state": "present" } ], "failed": false, "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "UUID=dfdb6909-6730-45cd-8755-b0c1764822fa", "state": "absent" }, { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "state": "mounted" } ], "packages": [ "xfsprogs", "cryptsetup" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "partition", "volumes": [ { "_device": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], 
"cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "partition", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:125 Sunday 09 June 2024 04:18:20 +0000 (0:00:00.015) 0:03:17.586 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "partition", "volumes": [ { "_device": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "partition", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:129 Sunday 09 June 2024 04:18:20 +0000 (0:00:00.015) 0:03:17.601 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:145 Sunday 09 June 2024 04:18:20 +0000 (0:00:00.014) 0:03:17.616 *********** changed: [sut] => (item={'src': 
'UUID=dfdb6909-6730-45cd-8755-b0c1764822fa', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'xfs'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "fstype": "xfs", "path": "/opt/test1", "src": "UUID=dfdb6909-6730-45cd-8755-b0c1764822fa", "state": "absent" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "UUID=dfdb6909-6730-45cd-8755-b0c1764822fa" } TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:157 Sunday 09 June 2024 04:18:20 +0000 (0:00:00.211) 0:03:17.828 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:162 Sunday 09 June 2024 04:18:20 +0000 (0:00:00.483) 0:03:18.311 *********** changed: [sut] => (item={'src': '/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623" } TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Sunday 09 June 2024 04:18:21 +0000 (0:00:00.191) 0:03:18.503 *********** skipping: [sut] => (item={'src': '/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:189 Sunday 09 June 2024 04:18:21 +0000 (0:00:00.018) 0:03:18.522 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:197 Sunday 09 June 2024 04:18:21 +0000 (0:00:00.483) 0:03:19.005 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906674.1014645, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1717906672.8774605, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 106954945, "isblk": false, "ischr": 
false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1717906672.8774605, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "726930035", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:202 Sunday 09 June 2024 04:18:21 +0000 (0:00:00.175) 0:03:19.180 *********** changed: [sut] => (item={'backing_device': '/dev/sda1', 'name': 'luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623', 'password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sda1", "name": "luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "state": "present" } } MSG: line added TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:224 Sunday 09 June 2024 04:18:22 +0000 (0:00:00.181) 0:03:19.362 *********** ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:326 Sunday 09 June 2024 04:18:22 +0000 (0:00:00.653) 0:03:20.015 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:2 Sunday 09 June 2024 04:18:22 +0000 (0:00:00.018) 0:03:20.033 *********** ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "partition", "volumes": [ { "_device": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, 
"type": "partition", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:7 Sunday 09 June 2024 04:18:22 +0000 (0:00:00.017) 0:03:20.051 *********** skipping: [sut] => {} TASK [Collect info about the volumes.] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:15 Sunday 09 June 2024 04:18:22 +0000 (0:00:00.012) 0:03:20.063 *********** ok: [sut] => { "changed": false, "info": { "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623": { "fstype": "xfs", "label": "", "name": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "size": "10G", "type": "crypt", "uuid": "3048a774-f767-4d4b-9d8a-0fb957e95c2f" }, "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sda1": { "fstype": "crypto_LUKS", "label": "", "name": "/dev/sda1", "size": "10G", "type": "partition", "uuid": "42b7f0ca-bd0f-4a24-823e-8c6207a18623" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "xfs", "label": "", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "82a26900-2b36-49b9-bc49-f4d2f0bb14b8" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:20 Sunday 09 June 2024 04:18:22 +0000 (0:00:00.177) 0:03:20.241 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.006037", "end": "2024-06-09 04:18:23.044155", "rc": 0, "start": "2024-06-09 04:18:23.038118" } STDOUT: # # /etc/fstab # Created by anaconda on Tue May 28 13:15:44 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=82a26900-2b36-49b9-bc49-f4d2f0bb14b8 / xfs defaults 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623 /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:25 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.175) 0:03:20.416 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002707", "end": "2024-06-09 04:18:23.217883", "failed_when_result": false, "rc": 0, "start": "2024-06-09 04:18:23.215176" } STDOUT: luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623 /dev/sda1 VALUE_SPECIFIED_IN_NO_LOG_PARAMETER TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:34 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.174) 0:03:20.591 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml for sut TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:5 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.026) 0:03:20.617 *********** ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:18 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.011) 0:03:20.629 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that VG shared value checks out] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:24 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.013) 0:03:20.643 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:34 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.012) 0:03:20.656 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-volumes.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:2 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.026) 0:03:20.682 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
[Get the canonical device path for each member device] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:13 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.012) 0:03:20.695 *********** TASK [Set pvs lvm length] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:22 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.012) 0:03:20.708 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:27 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.012) 0:03:20.721 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:33 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.013) 0:03:20.734 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:42 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.012) 0:03:20.747 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:48 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.012) 0:03:20.760 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:54 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.012) 0:03:20.772 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:59 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.013) 0:03:20.786 *********** TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:73 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.011) 0:03:20.797 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:8 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.022) 0:03:20.819 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:14 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.012) 0:03:20.832 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:21 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.013) 0:03:20.846 *********** skipping: 
[sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:28 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.039) 0:03:20.885 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:35 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.013) 0:03:20.899 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:45 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.012) 0:03:20.911 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:54 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.012) 0:03:20.924 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:64 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.012) 0:03:20.937 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:74 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.012) 0:03:20.950 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:85 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.016) 0:03:20.966 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:95 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.013) 0:03:20.979 *********** ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:76 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.015) 0:03:20.995 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-lvmraid.yml:2 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.026) 0:03:21.022 *********** skipping: [sut] => (item={'encryption': True, 'encryption_cipher': None, 'encryption_key': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 
'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'partition', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623', '_raw_device': '/dev/sda1', '_mount_id': '/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/sda1'}) => { "ansible_loop_var": "storage_test_lvmraid_volume", "changed": false, "skip_reason": "Conditional result was False", "storage_test_lvmraid_volume": { "_device": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "partition", "vdo_pool_size": null } } TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:79 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.021) 0:03:21.044 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-thin.yml:2 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.024) 0:03:21.068 *********** skipping: [sut] => (item={'encryption': True, 'encryption_cipher': None, 'encryption_key': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'partition', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 
'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623', '_raw_device': '/dev/sda1', '_mount_id': '/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/sda1'}) => { "ansible_loop_var": "storage_test_thin_volume", "changed": false, "skip_reason": "Conditional result was False", "storage_test_thin_volume": { "_device": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "partition", "vdo_pool_size": null } } TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:82 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.017) 0:03:21.086 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:5 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.026) 0:03:21.112 *********** ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:13 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.015) 0:03:21.127 *********** TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:20 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.011) 0:03:21.139 *********** TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:27 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.011) 0:03:21.150 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:85 Sunday 09 June 2024 04:18:23 
+0000 (0:00:00.011) 0:03:21.162 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-vdo.yml for sut TASK [Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-vdo.yml:2 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.026) 0:03:21.189 *********** skipping: [sut] => (item={'encryption': True, 'encryption_cipher': None, 'encryption_key': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'partition', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623', '_raw_device': '/dev/sda1', '_mount_id': '/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/sda1'}) => { "ansible_loop_var": "storage_test_vdo_volume", "changed": false, "skip_reason": "Conditional result was False", "storage_test_vdo_volume": { "_device": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "_raw_device": "/dev/sda1", "_raw_kernel_device": "/dev/sda1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "partition", "vdo_pool_size": null } } TASK [Clean up test variables] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:88 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.016) 0:03:21.206 *********** ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-volumes.yml:3 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.011) 
0:03:21.217 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml for sut TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:2 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.022) 0:03:21.240 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:21 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.016) 0:03:21.257 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml for sut TASK [Get expected mount device based on device type] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:7 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.056) 0:03:21.313 *********** ok: [sut] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:16 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.015) 0:03:21.329 *********** ok: [sut] => { "ansible_facts": { "storage_test_mount_device_matches": [ { "block_available": 2572529, "block_size": 4096, "block_total": 2598912, "block_used": 26383, "device": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "fstype": "xfs", "inode_available": 5230589, "inode_total": 5230592, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 10537078784, "size_total": 10645143552, "uuid": "3048a774-f767-4d4b-9d8a-0fb957e95c2f" } ], "storage_test_mount_expected_match_count": "1", "storage_test_mount_point_matches": [ { "block_available": 2572529, "block_size": 4096, "block_total": 2598912, "block_used": 26383, "device": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "fstype": "xfs", "inode_available": 5230589, "inode_total": 5230592, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 10537078784, "size_total": 10645143552, "uuid": "3048a774-f767-4d4b-9d8a-0fb957e95c2f" } ], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:38 Sunday 09 June 2024 04:18:23 +0000 (0:00:00.019) 0:03:21.348 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] 
******************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:51 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.013) 0:03:21.362 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the current mount state by mount point] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:63 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.015) 0:03:21.378 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:71 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.014) 0:03:21.393 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:83 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.012) 0:03:21.405 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:95 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.012) 0:03:21.418 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the mount fs type] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:110 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.012) 0:03:21.431 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:122 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.017) 0:03:21.448 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:128 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.012) 0:03:21.461 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:134 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.012) 0:03:21.474 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:146 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.012) 0:03:21.486 *********** ok: [sut] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:2 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.011) 0:03:21.498 
*********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:40 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.025) 0:03:21.524 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:48 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.017) 0:03:21.541 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:58 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.015) 0:03:21.557 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:71 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.012) 0:03:21.569 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:3 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.011) 0:03:21.581 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:12 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.017) 0:03:21.598 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:3 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.018) 0:03:21.617 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906699.8055491, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1717906699.8055491, "dev": 5, "device_type": 2049, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 1288, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/blockdevice", "mode": "0660", "mtime": 1717906699.8055491, "nlink": 1, "path": "/dev/sda1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, 
"xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:9 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.176) 0:03:21.794 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:16 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.017) 0:03:21.811 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:24 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.013) 0:03:21.824 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:30 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.015) 0:03:21.840 *********** ok: [sut] => { "ansible_facts": { "st_volume_type": "partition" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:34 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.051) 0:03:21.891 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:39 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.015) 0:03:21.907 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:3 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.015) 0:03:21.922 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906700.1095502, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1717906700.1095502, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 1321, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1717906700.1095502, "nlink": 1, "path": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:10 Sunday 09 June 2024 04:18:24 +0000 (0:00:00.181) 0:03:22.103 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:17 Sunday 09 June 2024 04:18:25 +0000 (0:00:00.863) 0:03:22.966 *********** ok: [sut] => { "changed": false, "cmd": 
[ "cryptsetup", "luksDump", "/dev/sda1" ], "delta": "0:00:00.006568", "end": "2024-06-09 04:18:25.775769", "rc": 0, "start": "2024-06-09 04:18:25.769201" } STDOUT: LUKS header information Version: 2 Epoch: 3 Metadata area: 16384 [bytes] Keyslots area: 16744448 [bytes] UUID: 42b7f0ca-bd0f-4a24-823e-8c6207a18623 Label: (no label) Subsystem: (no subsystem) Flags: (no flags) Data segments: 0: crypt offset: 16777216 [bytes] length: (whole device) cipher: aes-xts-plain64 sector: 512 [bytes] Keyslots: 0: luks2 Key: 512 bits Priority: normal Cipher: aes-xts-plain64 Cipher key: 512 bits PBKDF: argon2id Time cost: 4 Memory: 666079 Threads: 2 Salt: aa b6 26 8d 03 83 57 b4 9e 00 c5 34 16 9e cb 35 bd 62 ea 02 0c ed 32 c1 5a 3f cd 9e 8b ec a9 aa AF stripes: 4000 AF hash: sha256 Area offset:32768 [bytes] Area length:258048 [bytes] Digest ID: 0 Tokens: Digests: 0: pbkdf2 Hash: sha256 Iterations: 102882 Salt: fb a2 a6 30 bd af bd 3f a0 bf 6b c6 e1 c7 2a ee d2 aa 9d 08 8a b4 23 86 02 43 dd 08 d2 c4 3c c4 Digest: 10 7a 82 23 d3 06 ff 2d e3 63 b4 b4 7e 0c 8a ff 8e 90 97 6d b6 55 c8 b1 4d f8 1f c4 4e ba 40 ad TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:23 Sunday 09 June 2024 04:18:25 +0000 (0:00:00.181) 0:03:23.148 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:32 Sunday 09 June 2024 04:18:25 +0000 (0:00:00.017) 0:03:23.166 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:45 Sunday 09 June 2024 04:18:25 +0000 (0:00:00.017) 0:03:23.184 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:51 Sunday 09 June 2024 04:18:25 +0000 (0:00:00.016) 0:03:23.200 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:56 Sunday 09 June 2024 04:18:25 +0000 (0:00:00.015) 0:03:23.216 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:69 Sunday 09 June 2024 04:18:25 +0000 (0:00:00.014) 0:03:23.230 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:81 Sunday 09 June 2024 04:18:25 +0000 (0:00:00.013) 0:03:23.244 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:94 Sunday 09 June 2024 04:18:25 +0000 (0:00:00.012) 0:03:23.256 *********** ok: [sut] => { "ansible_facts": 
{ "_storage_test_crypttab_entries": [ "luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623 /dev/sda1 VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" ], "_storage_test_expected_crypttab_entries": "1", "_storage_test_expected_crypttab_key_file": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:106 Sunday 09 June 2024 04:18:25 +0000 (0:00:00.017) 0:03:23.274 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:114 Sunday 09 June 2024 04:18:25 +0000 (0:00:00.016) 0:03:23.290 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:122 Sunday 09 June 2024 04:18:25 +0000 (0:00:00.017) 0:03:23.308 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:131 Sunday 09 June 2024 04:18:25 +0000 (0:00:00.019) 0:03:23.327 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:140 Sunday 09 June 2024 04:18:25 +0000 (0:00:00.018) 0:03:23.346 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:8 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.013) 0:03:23.360 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:14 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.015) 0:03:23.375 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:21 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.013) 0:03:23.388 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:28 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.014) 0:03:23.402 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:35 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.015) 0:03:23.417 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] 
**************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:45 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.014) 0:03:23.432 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:54 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.013) 0:03:23.445 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:63 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.013) 0:03:23.458 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:72 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.014) 0:03:23.472 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:81 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.013) 0:03:23.486 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:3 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.016) 0:03:23.502 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested size of the volume] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:11 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.015) 0:03:23.518 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:20 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.014) 0:03:23.532 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:28 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.014) 0:03:23.547 *********** ok: [sut] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:32 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.014) 0:03:23.561 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:46 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.014) 0:03:23.575 *********** skipping: [sut] => {} TASK [Show test blockinfo] ***************************************************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:50 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.015) 0:03:23.591 *********** skipping: [sut] => {} TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:54 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.014) 0:03:23.605 *********** skipping: [sut] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:58 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.014) 0:03:23.619 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:68 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.014) 0:03:23.633 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:72 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.011) 0:03:23.645 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:77 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.011) 0:03:23.657 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:83 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.014) 0:03:23.671 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:88 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.012) 0:03:23.684 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:96 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.012) 0:03:23.697 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:104 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.012) 0:03:23.710 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:109 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.012) 0:03:23.723 *********** skipping: [sut] => {} TASK [Show volume thin pool size] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:113 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.012) 0:03:23.735 *********** skipping: [sut] => {} TASK [Show test volume size] 
*************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:117 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.014) 0:03:23.749 *********** skipping: [sut] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:121 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.012) 0:03:23.762 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:129 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.012) 0:03:23.775 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:138 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.012) 0:03:23.788 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:142 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.012) 0:03:23.800 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:150 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.012) 0:03:23.813 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:156 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.014) 0:03:23.827 *********** ok: [sut] => { "storage_test_actual_size": { "changed": false, "skip_reason": "Conditional result was False", "skipped": true } } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:160 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.015) 0:03:23.843 *********** ok: [sut] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Assert expected size is actual size] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:164 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.014) 0:03:23.857 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:5 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.014) 0:03:23.871 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:13 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.012) 0:03:23.884 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] 
****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:18 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.012) 0:03:23.896 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:27 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.014) 0:03:23.910 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:35 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.012) 0:03:23.923 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:41 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.012) 0:03:23.936 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:47 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.012) 0:03:23.949 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:27 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.012) 0:03:23.962 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:44 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.011) 0:03:23.974 *********** TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:54 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.013) 0:03:23.987 *********** ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Remove the key file] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:329 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.044) 0:03:24.032 *********** ok: [sut] => { "changed": false, "path": "/tmp/storage_testgvddjwumlukskey", "state": "absent" } TASK [Test for correct handling of new encrypted volume w/ no key] ************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:339 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.181) 0:03:24.214 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml for sut TASK [Store global variable value copy] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:4 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.020) 0:03:24.234 *********** ok: [sut] => { "ansible_facts": { "storage_pools_global": [], "storage_safe_mode_global": true, "storage_volumes_global": [] }, "changed": false } TASK [Verify role raises correct error] 
**************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:10 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.016) 0:03:24.251 *********** TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.018) 0:03:24.269 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.020) 0:03:24.289 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.017) 0:03:24.307 *********** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:18:26 +0000 (0:00:00.037) 0:03:24.344 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set flag to indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:18:27 +0000 (0:00:00.018) 0:03:24.363 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:18:27 +0000 (0:00:00.019) 0:03:24.383 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 
Sunday 09 June 2024 04:18:27 +0000 (0:00:00.015) 0:03:24.398 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:18:27 +0000 (0:00:00.011) 0:03:24.410 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:18:27 +0000 (0:00:00.026) 0:03:24.436 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:18:27 +0000 (0:00:00.867) 0:03:25.304 *********** ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo", "type": "lvm", "volumes": [ { "encryption": true, "mount_point": "/opt/test1", "name": "test1", "size": "4g" } ] } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:18:27 +0000 (0:00:00.016) 0:03:25.320 *********** ok: [sut] => { "storage_volumes": [] } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:18:27 +0000 (0:00:00.016) 0:03:25.336 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "cryptsetup", "lvm2" ], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:18:29 +0000 (0:00:01.090) 0:03:26.426 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:18:29 +0000 (0:00:00.023) 0:03:26.449 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 04:18:29 +0000 (0:00:00.012) 0:03:26.462 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:18:29 +0000 (0:00:00.013) 0:03:26.475 *********** TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:18:29 +0000 (0:00:00.011) 0:03:26.487 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup kpartx lvm2 TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:18:30 +0000 (0:00:00.865) 0:03:27.352 *********** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { 
"name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": 
"nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", 
"source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": 
"systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:18:31 +0000 (0:00:01.475) 0:03:28.828 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:18:31 +0000 (0:00:00.020) 0:03:28.849 *********** TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:18:31 +0000 (0:00:00.011) 0:03:28.861 *********** fatal: [sut]: FAILED! => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } MSG: encrypted volume 'test1' missing key/password TASK [linux-system-roles.storage : Failed message] ***************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:106 Sunday 09 June 2024 04:18:32 +0000 (0:00:01.169) 0:03:30.030 *********** fatal: [sut]: FAILED! 
=> { "changed": false } MSG: {'msg': "encrypted volume 'test1' missing key/password", 'changed': False, 'actions': [], 'leaves': [], 'mounts': [], 'crypts': [], 'pools': [], 'volumes': [], 'packages': [], 'failed': True, 'invocation': {'module_args': {'pools': [{'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None}]}], 'volumes': [], 'use_partitions': None, 'disklabel_type': None, 'pool_defaults': {'state': 'present', 'type': 'lvm', 'disks': [], 'volumes': [], 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'shared': False}, 'volume_defaults': {'state': 'present', 'type': 'lvm', 'size': 0, 'disks': [], 'fs_type': 'xfs', 'fs_label': '', 'fs_create_options': '', 'fs_overwrite_existing': True, 'mount_point': '', 'mount_options': 'defaults', 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_stripe_size': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': []}, 'safe_mode': False, 'packages_only': False, 'diskvolume_mkfs_option_map': {}}}, '_ansible_no_log': False} TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:18:32 +0000 (0:00:00.016) 0:03:30.047 *********** TASK [Check that we failed in the role] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:29 Sunday 09 June 2024 04:18:32 +0000 (0:00:00.012) 0:03:30.059 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the blivet output and error message are correct] 
****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:34 Sunday 09 June 2024 04:18:32 +0000 (0:00:00.016) 0:03:30.076 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify correct exception or error message] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:45 Sunday 09 June 2024 04:18:32 +0000 (0:00:00.019) 0:03:30.096 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Create an encrypted lvm volume w/ default fs] **************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:357 Sunday 09 June 2024 04:18:32 +0000 (0:00:00.016) 0:03:30.112 *********** TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Sunday 09 June 2024 04:18:32 +0000 (0:00:00.062) 0:03:30.174 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Sunday 09 June 2024 04:18:32 +0000 (0:00:00.018) 0:03:30.193 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Sunday 09 June 2024 04:18:32 +0000 (0:00:00.015) 0:03:30.209 *********** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:18:32 +0000 (0:00:00.033) 0:03:30.242 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set flag to indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:18:32 +0000 (0:00:00.015) 0:03:30.258 *********** skipping: [sut] => { "changed": false, 
"skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:18:32 +0000 (0:00:00.014) 0:03:30.272 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Sunday 09 June 2024 04:18:32 +0000 (0:00:00.012) 0:03:30.285 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:18:32 +0000 (0:00:00.014) 0:03:30.299 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:18:32 +0000 (0:00:00.037) 0:03:30.337 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:18:33 +0000 (0:00:00.857) 0:03:31.194 *********** ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo", "type": "lvm", "volumes": [ { "encryption": true, "encryption_cipher": "aes-xts-plain64", "encryption_key_size": 512, "encryption_luks_version": "luks1", "encryption_password": "yabbadabbadoo", "mount_point": "/opt/test1", "name": "test1", "size": "4g" } ] } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:18:33 +0000 (0:00:00.019) 0:03:31.214 *********** ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:18:33 +0000 (0:00:00.013) 0:03:31.228 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "cryptsetup", "lvm2" ], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:18:34 +0000 (0:00:01.096) 0:03:32.324 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:18:34 +0000 
(0:00:00.022) 0:03:32.347 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 04:18:35 +0000 (0:00:00.011) 0:03:32.359 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:18:35 +0000 (0:00:00.013) 0:03:32.372 *********** TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:18:35 +0000 (0:00:00.013) 0:03:32.385 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup kpartx lvm2 TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:18:35 +0000 (0:00:00.863) 0:03:33.249 *********** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": 
"systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, 
"getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, 
"modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": 
"enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", 
"source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": 
"systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:18:37 +0000 (0:00:01.461) 0:03:34.711 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:18:37 +0000 (0:00:00.021) 0:03:34.732 *********** TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:18:37 +0000 (0:00:00.011) 0:03:34.744 *********** changed: [sut] => { "actions": [ { "action": "destroy format", "device": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sda1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "disklabel" }, { "action": "create format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/foo", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/foo-test1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "fs_type": "xfs" } ], "changed": true, "crypts": [ { "backing_device": "/dev/sda1", "name": "luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "password": "-", "state": "absent" }, { "backing_device": "/dev/mapper/foo-test1", "name": "luks-25f92b54-a245-4387-8b24-afa23facba1b", "password": "-", 
"state": "present" } ], "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "state": "absent" }, { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "state": "mounted" } ], "packages": [ "cryptsetup", "xfsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": true, "encryption_cipher": "aes-xts-plain64", "encryption_key": null, "encryption_key_size": 512, "encryption_luks_version": "luks1", "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:98 Sunday 09 June 2024 04:18:45 +0000 (0:00:08.025) 0:03:42.769 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:18:45 +0000 (0:00:00.013) 0:03:42.782 *********** TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:116 Sunday 09 June 2024 04:18:45 +0000 (0:00:00.012) 0:03:42.795 *********** ok: [sut] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sda1", "fs_type": null }, { "action": "destroy 
format", "device": "/dev/sda", "fs_type": "disklabel" }, { "action": "create format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/foo", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/foo-test1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "fs_type": "xfs" } ], "changed": true, "crypts": [ { "backing_device": "/dev/sda1", "name": "luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "password": "-", "state": "absent" }, { "backing_device": "/dev/mapper/foo-test1", "name": "luks-25f92b54-a245-4387-8b24-afa23facba1b", "password": "-", "state": "present" } ], "failed": false, "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "state": "absent" }, { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "state": "mounted" } ], "packages": [ "cryptsetup", "xfsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": true, "encryption_cipher": "aes-xts-plain64", "encryption_key": null, "encryption_key_size": 512, "encryption_luks_version": "luks1", "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:125 Sunday 09 June 2024 04:18:45 +0000 (0:00:00.016) 0:03:42.812 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, 
"encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": true, "encryption_cipher": "aes-xts-plain64", "encryption_key": null, "encryption_key_size": 512, "encryption_luks_version": "luks1", "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:129 Sunday 09 June 2024 04:18:45 +0000 (0:00:00.015) 0:03:42.828 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:145 Sunday 09 June 2024 04:18:45 +0000 (0:00:00.014) 0:03:42.842 *********** changed: [sut] => (item={'src': '/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'xfs'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "state": "absent" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623" } TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:157 Sunday 09 June 2024 04:18:45 +0000 (0:00:00.181) 0:03:43.023 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:162 Sunday 09 June 2024 04:18:46 +0000 (0:00:00.483) 0:03:43.507 *********** changed: [sut] => (item={'src': '/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", 
"fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b" } TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Sunday 09 June 2024 04:18:46 +0000 (0:00:00.196) 0:03:43.703 *********** skipping: [sut] => (item={'src': '/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:189 Sunday 09 June 2024 04:18:46 +0000 (0:00:00.017) 0:03:43.721 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:197 Sunday 09 June 2024 04:18:46 +0000 (0:00:00.482) 0:03:44.203 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906703.2165606, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "54ffea08f6c95f52405a96d31be5047c0b0ee081", "ctime": 1717906701.9855564, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 174063810, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0600", "mtime": 1717906701.9855564, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 88, "uid": 0, "version": "2866772283", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:202 Sunday 09 June 2024 04:18:47 +0000 (0:00:00.178) 0:03:44.382 *********** changed: [sut] => (item={'backing_device': '/dev/sda1', 'name': 'luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sda1", "name": "luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed changed: [sut] => (item={'backing_device': '/dev/mapper/foo-test1', 'name': 'luks-25f92b54-a245-4387-8b24-afa23facba1b', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, 
"entry": { "backing_device": "/dev/mapper/foo-test1", "name": "luks-25f92b54-a245-4387-8b24-afa23facba1b", "password": "-", "state": "present" } } MSG: line added TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:224 Sunday 09 June 2024 04:18:47 +0000 (0:00:00.356) 0:03:44.739 *********** ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:376 Sunday 09 June 2024 04:18:47 +0000 (0:00:00.601) 0:03:45.341 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:2 Sunday 09 June 2024 04:18:48 +0000 (0:00:00.020) 0:03:45.361 *********** ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": true, "encryption_cipher": "aes-xts-plain64", "encryption_key": null, "encryption_key_size": 512, "encryption_luks_version": "luks1", "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:7 Sunday 09 June 2024 04:18:48 +0000 (0:00:00.017) 0:03:45.379 *********** skipping: [sut] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:15 Sunday 09 June 2024 04:18:48 +0000 (0:00:00.012) 0:03:45.392 *********** ok: [sut] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "crypto_LUKS", "label": "", "name": "/dev/mapper/foo-test1", "size": "4G", "type": "lvm", "uuid": "25f92b54-a245-4387-8b24-afa23facba1b" }, "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b": { "fstype": "xfs", "label": "", "name": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "size": "4G", "type": "crypt", "uuid": "b490495f-38b8-4761-84a8-8d2e9c2b39a7" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "1agGsq-N3WZ-Sn5I-mLjB-j4Aw-8XPP-3ZXjSl" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "xfs", "label": "", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "82a26900-2b36-49b9-bc49-f4d2f0bb14b8" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:20 Sunday 09 June 2024 04:18:48 +0000 (0:00:00.175) 0:03:45.567 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002443", "end": "2024-06-09 04:18:48.367905", "rc": 0, "start": "2024-06-09 04:18:48.365462" } STDOUT: # # /etc/fstab # Created by anaconda on Tue May 28 13:15:44 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=82a26900-2b36-49b9-bc49-f4d2f0bb14b8 / xfs defaults 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:25 Sunday 09 June 2024 04:18:48 +0000 (0:00:00.170) 0:03:45.738 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002760", "end": "2024-06-09 04:18:48.536778", "failed_when_result": false, "rc": 0, "start": "2024-06-09 04:18:48.534018" } STDOUT: luks-25f92b54-a245-4387-8b24-afa23facba1b /dev/mapper/foo-test1 - TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:34 Sunday 09 June 2024 04:18:48 +0000 (0:00:00.170) 0:03:45.908 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml for sut TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:5 Sunday 09 June 2024 04:18:48 +0000 (0:00:00.025) 0:03:45.934 *********** ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:18 Sunday 09 June 2024 04:18:48 +0000 (0:00:00.012) 0:03:45.946 *********** ok: [sut] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.031699", "end": "2024-06-09 04:18:48.780931", "rc": 0, "start": "2024-06-09 04:18:48.749232" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:24 Sunday 09 June 2024 04:18:48 +0000 (0:00:00.204) 0:03:46.151 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:34 Sunday 09 June 2024 04:18:48 +0000 (0:00:00.018) 0:03:46.169 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-volumes.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:2 Sunday 09 June 2024 04:18:48 +0000 (0:00:00.027) 0:03:46.197 *********** ok: 
[sut] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:13 Sunday 09 June 2024 04:18:48 +0000 (0:00:00.017) 0:03:46.215 *********** ok: [sut] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:22 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.259) 0:03:46.474 *********** ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:27 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.015) 0:03:46.490 *********** ok: [sut] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:33 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.016) 0:03:46.506 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:42 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.015) 0:03:46.522 *********** ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:48 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.015) 0:03:46.538 *********** ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:54 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.014) 0:03:46.552 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:59 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.012) 0:03:46.565 *********** ok: [sut] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:73 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.018) 0:03:46.584 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:8 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.023) 0:03:46.607 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:14 Sunday 09 June 
2024 04:18:49 +0000 (0:00:00.015) 0:03:46.623 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:21 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.012) 0:03:46.635 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:28 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.012) 0:03:46.648 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:35 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.012) 0:03:46.661 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:45 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.012) 0:03:46.673 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:54 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.012) 0:03:46.686 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:64 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.012) 0:03:46.699 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:74 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.011) 0:03:46.710 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:85 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.012) 0:03:46.723 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:95 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.012) 0:03:46.736 *********** ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:76 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.011) 0:03:46.747 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-lvmraid.yml:2 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.024) 0:03:46.772 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml for sut TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:8 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.024) 0:03:46.797 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:16 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.012) 0:03:46.809 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:21 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.049) 0:03:46.859 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:29 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.014) 0:03:46.873 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:34 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.013) 0:03:46.887 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:40 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.013) 0:03:46.901 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:46 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.014) 0:03:46.916 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:79 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.013) 0:03:46.929 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-thin.yml:2 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.028) 0:03:46.958 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-thin.yml for sut TASK [Get information about thinpool] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-thin.yml:8 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.028) 0:03:46.986 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-thin.yml:16 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.015) 0:03:47.002 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-thin.yml:23 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.013) 0:03:47.015 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-thin.yml:27 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.013) 0:03:47.029 *********** ok: [sut] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:82 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.013) 0:03:47.043 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:5 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.028) 0:03:47.072 *********** ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:13 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.016) 0:03:47.088 *********** skipping: [sut] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:20 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.016) 0:03:47.104 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml for sut TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:2 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.024) 0:03:47.129 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:9 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.016) 0:03:47.146 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:18 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.017) 0:03:47.164 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:27 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.013) 0:03:47.177 *********** skipping: 
[sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:37 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.013) 0:03:47.190 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:47 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.013) 0:03:47.203 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:27 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.012) 0:03:47.216 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:85 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.011) 0:03:47.227 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-vdo.yml for sut TASK [Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-vdo.yml:2 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.027) 0:03:47.255 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml for sut TASK [Get information about VDO deduplication] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:9 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.026) 0:03:47.282 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:16 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.015) 0:03:47.298 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:22 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.013) 0:03:47.311 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:28 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.013) 0:03:47.324 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:35 Sunday 09 June 2024 04:18:49 +0000 (0:00:00.013) 0:03:47.337 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:41 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.013) 0:03:47.351 
*********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:47 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.012) 0:03:47.364 *********** ok: [sut] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:88 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.014) 0:03:47.378 *********** ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-volumes.yml:3 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.013) 0:03:47.391 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml for sut TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:2 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.024) 0:03:47.415 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:21 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.016) 0:03:47.432 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml for sut TASK [Get expected mount device based on device type] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:7 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.061) 0:03:47.493 *********** ok: [sut] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:16 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.016) 0:03:47.509 *********** ok: [sut] => { "ansible_facts": { "storage_test_mount_device_matches": [ { "block_available": 1014697, "block_size": 4096, "block_total": 1030144, "block_used": 15447, "device": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "fstype": "xfs", "inode_available": 2093053, "inode_total": 2093056, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 
4156198912, "size_total": 4219469824, "uuid": "b490495f-38b8-4761-84a8-8d2e9c2b39a7" } ], "storage_test_mount_expected_match_count": "1", "storage_test_mount_point_matches": [ { "block_available": 1014697, "block_size": 4096, "block_total": 1030144, "block_used": 15447, "device": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "fstype": "xfs", "inode_available": 2093053, "inode_total": 2093056, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 4156198912, "size_total": 4219469824, "uuid": "b490495f-38b8-4761-84a8-8d2e9c2b39a7" } ], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:38 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.021) 0:03:47.530 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:51 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.012) 0:03:47.543 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the current mount state by mount point] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:63 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.016) 0:03:47.559 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:71 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.017) 0:03:47.577 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:83 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.015) 0:03:47.592 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:95 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.013) 0:03:47.606 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the mount fs type] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:110 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.013) 0:03:47.620 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:122 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.017) 0:03:47.638 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:128 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.013) 0:03:47.652 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] 
****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:134 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.054) 0:03:47.706 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:146 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.015) 0:03:47.721 *********** ok: [sut] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:2 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.013) 0:03:47.735 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:40 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.027) 0:03:47.763 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:48 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.017) 0:03:47.780 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:58 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.017) 0:03:47.797 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:71 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.014) 0:03:47.811 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:3 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.013) 0:03:47.825 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:12 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.017) 
0:03:47.842 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:3 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.019) 0:03:47.862 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906725.0186324, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1717906725.0186324, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 1546, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1717906725.0186324, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:9 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.179) 0:03:48.041 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:16 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.018) 0:03:48.060 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:24 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.015) 0:03:48.075 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:30 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.017) 0:03:48.092 *********** ok: [sut] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:34 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.014) 0:03:48.107 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:39 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.014) 0:03:48.122 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:3 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.017) 0:03:48.139 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906725.3336334, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1717906725.3336334, "dev": 5, "device_type": 64769, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 1588, "isblk": true, "ischr": false, "isdir": false, 
"isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1717906725.3336334, "nlink": 1, "path": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:10 Sunday 09 June 2024 04:18:50 +0000 (0:00:00.178) 0:03:48.317 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:17 Sunday 09 June 2024 04:18:51 +0000 (0:00:00.877) 0:03:49.194 *********** ok: [sut] => { "changed": false, "cmd": [ "cryptsetup", "luksDump", "/dev/mapper/foo-test1" ], "delta": "0:00:00.006523", "end": "2024-06-09 04:18:52.007546", "rc": 0, "start": "2024-06-09 04:18:52.001023" } STDOUT: LUKS header information for /dev/mapper/foo-test1 Version: 1 Cipher name: aes Cipher mode: xts-plain64 Hash spec: sha256 Payload offset: 16384 MK bits: 512 MK digest: 33 d7 e6 dc c4 33 3c 1d 1e 9c 84 70 fb b3 82 54 d5 9e 49 5c MK salt: 57 95 47 9d 5d 9c b2 16 0b 68 c3 e5 62 39 0d 1a fd ec 68 ab 81 6f da c6 ff b8 21 1d b9 0c f2 2f MK iterations: 103206 UUID: 25f92b54-a245-4387-8b24-afa23facba1b Key Slot 0: ENABLED Iterations: 1659138 Salt: 4f 34 2b c5 df f9 b4 b1 d2 d3 d2 c2 46 11 1d a3 cb 98 a1 7f 68 47 b9 0f a3 a5 b8 f8 23 62 b1 2e Key material offset: 8 AF stripes: 4000 Key Slot 1: DISABLED Key Slot 2: DISABLED Key Slot 3: DISABLED Key Slot 4: DISABLED Key Slot 5: DISABLED Key Slot 6: DISABLED Key Slot 7: DISABLED TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:23 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.185) 0:03:49.380 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:32 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.017) 0:03:49.398 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:45 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.018) 0:03:49.417 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:51 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.017) 0:03:49.435 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:56 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.018) 0:03:49.453 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check LUKS key size] 
***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:69 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.020) 0:03:49.473 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:81 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.023) 0:03:49.497 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:94 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.020) 0:03:49.517 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [ "luks-25f92b54-a245-4387-8b24-afa23facba1b /dev/mapper/foo-test1 -" ], "_storage_test_expected_crypttab_entries": "1", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:106 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.017) 0:03:49.535 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:114 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.016) 0:03:49.552 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:122 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.017) 0:03:49.570 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:131 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.018) 0:03:49.588 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:140 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.023) 0:03:49.612 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:8 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.013) 0:03:49.626 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:14 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.013) 0:03:49.639 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:21 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.013) 
0:03:49.652 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:28 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.013) 0:03:49.665 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:35 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.013) 0:03:49.679 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:45 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.014) 0:03:49.694 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:54 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.013) 0:03:49.707 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:63 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.014) 0:03:49.722 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:72 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.014) 0:03:49.737 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:81 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.013) 0:03:49.751 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:3 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.013) 0:03:49.764 *********** ok: [sut] => { "bytes": 4294967296, "changed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:11 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.284) 0:03:50.048 *********** ok: [sut] => { "bytes": 4294967296, "changed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } TASK [Establish base value for expected size] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:20 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.177) 0:03:50.226 *********** ok: [sut] => { "ansible_facts": { "storage_test_expected_size": "4294967296" }, "changed": false } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:28 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.018) 
0:03:50.244 *********** ok: [sut] => { "storage_test_expected_size": "4294967296" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:32 Sunday 09 June 2024 04:18:52 +0000 (0:00:00.014) 0:03:50.259 *********** ok: [sut] => { "bytes": 10726680821, "changed": false, "lvm": "9g", "parted": "9GiB", "size": "9 GiB" } TASK [Show test pool] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:46 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.171) 0:03:50.431 *********** skipping: [sut] => {} TASK [Show test blockinfo] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:50 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.015) 0:03:50.447 *********** skipping: [sut] => {} TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:54 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.015) 0:03:50.463 *********** skipping: [sut] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:58 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.015) 0:03:50.479 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:68 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.016) 0:03:50.495 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:72 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.013) 0:03:50.509 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:77 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.013) 0:03:50.522 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:83 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.013) 0:03:50.536 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:88 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.013) 0:03:50.549 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:96 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.013) 0:03:50.562 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:104 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.015) 0:03:50.577 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:109 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.013) 0:03:50.591 *********** skipping: [sut] => {} TASK [Show volume thin pool size] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:113 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.013) 0:03:50.604 *********** skipping: [sut] => {} TASK [Show test volume size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:117 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.013) 0:03:50.617 *********** skipping: [sut] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:121 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.013) 0:03:50.630 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:129 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.013) 0:03:50.644 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:138 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.014) 0:03:50.658 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:142 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.013) 0:03:50.672 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:150 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.013) 0:03:50.685 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:156 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.013) 0:03:50.699 *********** ok: [sut] => { "storage_test_actual_size": { "bytes": 4294967296, "changed": false, "failed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:160 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.015) 0:03:50.714 *********** ok: [sut] => { "storage_test_expected_size": "4294967296" } TASK [Assert expected size is actual size] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:164 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.014) 0:03:50.728 *********** ok: [sut] => { "changed": false } 
MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:5 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.021) 0:03:50.750 *********** ok: [sut] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.032128", "end": "2024-06-09 04:18:53.586578", "rc": 0, "start": "2024-06-09 04:18:53.554450" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:13 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.209) 0:03:50.959 *********** ok: [sut] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:18 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.016) 0:03:50.976 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:27 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.019) 0:03:50.995 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:35 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.015) 0:03:51.011 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:41 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.014) 0:03:51.025 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:47 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.014) 0:03:51.039 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:27 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.013) 0:03:51.053 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:44 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.011) 0:03:51.064 *********** TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:54 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.045) 0:03:51.109 *********** ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Verify preservation of encryption 
settings on existing LVM volume] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:379 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.012) 0:03:51.122 *********** TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.027) 0:03:51.150 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.018) 0:03:51.169 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.015) 0:03:51.184 *********** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.033) 0:03:51.217 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set flag to indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.013) 0:03:51.231 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.012) 0:03:51.244 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Sunday 
09 June 2024 04:18:53 +0000 (0:00:00.012) 0:03:51.256 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.011) 0:03:51.268 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:18:53 +0000 (0:00:00.025) 0:03:51.293 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:18:54 +0000 (0:00:00.857) 0:03:52.151 *********** ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo", "type": "lvm", "volumes": [ { "mount_point": "/opt/test1", "name": "test1", "size": "4g" } ] } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:18:54 +0000 (0:00:00.015) 0:03:52.167 *********** ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:18:54 +0000 (0:00:00.014) 0:03:52.181 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "lvm2" ], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:18:56 +0000 (0:00:01.211) 0:03:53.392 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:18:56 +0000 (0:00:00.022) 0:03:53.415 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 04:18:56 +0000 (0:00:00.012) 0:03:53.427 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:18:56 +0000 (0:00:00.012) 0:03:53.440 *********** TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:18:56 +0000 (0:00:00.012) 0:03:53.452 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kpartx lvm2 TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:18:56 +0000 (0:00:00.861) 0:03:54.313 *********** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": 
"dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": 
"nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", 
"source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-cryptsetup@luks\\x2d42b7f0ca\\x2dbd0f\\x2d4a24\\x2d823e\\x2d8c6207a18623.service": { "name": "systemd-cryptsetup@luks\\x2d42b7f0ca\\x2dbd0f\\x2d4a24\\x2d823e\\x2d8c6207a18623.service", "source": "systemd", "state": "stopped", "status": "generated" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": 
"systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:18:58 +0000 (0:00:01.480) 0:03:55.794 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [ "systemd-cryptsetup@luks\\x2d42b7f0ca\\x2dbd0f\\x2d4a24\\x2d823e\\x2d8c6207a18623.service" ] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:18:58 +0000 (0:00:00.020) 0:03:55.815 *********** changed: [sut] => (item=systemd-cryptsetup@luks\x2d42b7f0ca\x2dbd0f\x2d4a24\x2d823e\x2d8c6207a18623.service) => { "ansible_loop_var": "item", "changed": true, "item": "systemd-cryptsetup@luks\\x2d42b7f0ca\\x2dbd0f\\x2d4a24\\x2d823e\\x2d8c6207a18623.service", "name": "systemd-cryptsetup@luks\\x2d42b7f0ca\\x2dbd0f\\x2d4a24\\x2d823e\\x2d8c6207a18623.service", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "\"system-systemd\\\\x2dcryptsetup.slice\" systemd-udevd-kernel.socket systemd-journald.socket dev-sda1.device cryptsetup-pre.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "umount.target cryptsetup.target \"blockdev@dev-mapper-luks\\\\x2d42b7f0ca\\\\x2dbd0f\\\\x2d4a24\\\\x2d823e\\\\x2d8c6207a18623.target\"", "BindsTo": "dev-sda1.device", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read 
cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "umount.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "no", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Cryptography Setup for luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623", "DevicePolicy": "auto", "Documentation": "\"man:crypttab(5)\" \"man:systemd-cryptsetup-generator(8)\" \"man:systemd-cryptsetup@.service(8)\"", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup attach luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623 /dev/sda1 VALUE_SPECIFIED_IN_NO_LOG_PARAMETER ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup attach luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623 /dev/sda1 VALUE_SPECIFIED_IN_NO_LOG_PARAMETER ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup detach luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup detach luks-42b7f0ca-bd0f-4a24-823e-8c6207a18623 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/systemd-cryptsetup@luks\\x2d42b7f0ca\\x2dbd0f\\x2d4a24\\x2d823e\\x2d8c6207a18623.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-cryptsetup@luks\\x2d42b7f0ca\\x2dbd0f\\x2d4a24\\x2d823e\\x2d8c6207a18623.service", "IgnoreOnIsolate": "yes", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "shared", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13972", "LimitNPROCSoft": "13972", "LimitRSS": 
"infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13972", "LimitSIGPENDINGSoft": "13972", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"systemd-cryptsetup@luks\\\\x2d42b7f0ca\\\\x2dbd0f\\\\x2d4a24\\\\x2d823e\\\\x2d8c6207a18623.service\"", "NeedDaemonReload": "yes", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "500", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "RequiredBy": "cryptsetup.target", "Requires": "\"system-systemd\\\\x2dcryptsetup.slice\"", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system-systemd\\x2dcryptsetup.slice", "SourcePath": "/etc/crypttab", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sun 2024-06-09 04:18:46 UTC", "StateChangeTimestampMonotonic": "667510163", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22356", "TimeoutAbortUSec": "infinity", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "infinity", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", 
"UtmpMode": "init", "Wants": "\"blockdev@dev-mapper-luks\\\\x2d42b7f0ca\\\\x2dbd0f\\\\x2d4a24\\\\x2d823e\\\\x2d8c6207a18623.target\"", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:18:58 +0000 (0:00:00.516) 0:03:56.332 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "state": "mounted" } ], "packages": [ "lvm2", "cryptsetup", "xfsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks1", "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:98 Sunday 09 June 2024 04:19:00 +0000 (0:00:01.245) 0:03:57.578 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:19:00 +0000 (0:00:00.012) 0:03:57.591 *********** changed: [sut] => (item=systemd-cryptsetup@luks\x2d42b7f0ca\x2dbd0f\x2d4a24\x2d823e\x2d8c6207a18623.service) => { "ansible_loop_var": "item", "changed": true, "item": "systemd-cryptsetup@luks\\x2d42b7f0ca\\x2dbd0f\\x2d4a24\\x2d823e\\x2d8c6207a18623.service", "name": 
"systemd-cryptsetup@luks\\x2d42b7f0ca\\x2dbd0f\\x2d4a24\\x2d823e\\x2d8c6207a18623.service", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "no", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "systemd-cryptsetup@luks\\x2d42b7f0ca\\x2dbd0f\\x2d4a24\\x2d823e\\x2d8c6207a18623.service", "DevicePolicy": "auto", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/etc/systemd/system/systemd-cryptsetup@luks\\x2d42b7f0ca\\x2dbd0f\\x2d4a24\\x2d823e\\x2d8c6207a18623.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-cryptsetup@luks\\x2d42b7f0ca\\x2dbd0f\\x2d4a24\\x2d823e\\x2d8c6207a18623.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "463069184", "LimitMEMLOCKSoft": "463069184", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": 
"0", "LimitNICESoft": "0", "LimitNOFILE": "1073741816", "LimitNOFILESoft": "1073741816", "LimitNPROC": "13972", "LimitNPROCSoft": "13972", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13972", "LimitSIGPENDINGSoft": "13972", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadError": "org.freedesktop.systemd1.UnitMasked \"Unit systemd-cryptsetup@luks\\x2d42b7f0ca\\x2dbd0f\\x2d4a24\\x2d823e\\x2d8c6207a18623.service is masked.\"", "LoadState": "masked", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"systemd-cryptsetup@luks\\\\x2d42b7f0ca\\\\x2dbd0f\\\\x2d4a24\\\\x2d823e\\\\x2d8c6207a18623.service\"", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "inherit", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22356", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": 
"disabled", "UnitFileState": "masked", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:116 Sunday 09 June 2024 04:19:00 +0000 (0:00:00.512) 0:03:58.103 *********** ok: [sut] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "state": "mounted" } ], "packages": [ "lvm2", "cryptsetup", "xfsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks1", "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:125 Sunday 09 June 2024 04:19:00 +0000 (0:00:00.016) 0:03:58.119 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, 
"cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks1", "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:129 Sunday 09 June 2024 04:19:00 +0000 (0:00:00.015) 0:03:58.134 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:145 Sunday 09 June 2024 04:19:00 +0000 (0:00:00.014) 0:03:58.149 *********** TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:157 Sunday 09 June 2024 04:19:00 +0000 (0:00:00.013) 0:03:58.162 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:162 Sunday 09 June 2024 04:19:01 +0000 (0:00:00.477) 0:03:58.639 *********** ok: [sut] => (item={'src': '/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": false, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b" } TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Sunday 09 June 2024 04:19:01 +0000 (0:00:00.185) 0:03:58.825 *********** skipping: [sut] => (item={'src': '/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", 
"state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:189 Sunday 09 June 2024 04:19:01 +0000 (0:00:00.017) 0:03:58.843 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:197 Sunday 09 June 2024 04:19:01 +0000 (0:00:00.485) 0:03:59.328 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906728.535644, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "f5fe5989f33339df6109621bead2d7d7e3537c0f", "ctime": 1717906727.36164, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 234881346, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0600", "mtime": 1717906727.36164, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 66, "uid": 0, "version": "3566853825", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:202 Sunday 09 June 2024 04:19:02 +0000 (0:00:00.181) 0:03:59.509 *********** TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:224 Sunday 09 June 2024 04:19:02 +0000 (0:00:00.012) 0:03:59.521 *********** ok: [sut] TASK [Assert preservation of encryption settings on existing LVM volume] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:393 Sunday 09 June 2024 04:19:02 +0000 (0:00:00.614) 0:04:00.136 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:400 Sunday 09 June 2024 04:19:02 +0000 (0:00:00.017) 0:04:00.154 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:2 Sunday 09 June 2024 04:19:02 +0000 (0:00:00.021) 0:04:00.176 *********** ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", 
"cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks1", "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:7 Sunday 09 June 2024 04:19:02 +0000 (0:00:00.017) 0:04:00.193 *********** skipping: [sut] => {} TASK [Collect info about the volumes.] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:15 Sunday 09 June 2024 04:19:02 +0000 (0:00:00.012) 0:04:00.206 *********** ok: [sut] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "crypto_LUKS", "label": "", "name": "/dev/mapper/foo-test1", "size": "4G", "type": "lvm", "uuid": "25f92b54-a245-4387-8b24-afa23facba1b" }, "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b": { "fstype": "xfs", "label": "", "name": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "size": "4G", "type": "crypt", "uuid": "b490495f-38b8-4761-84a8-8d2e9c2b39a7" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "1agGsq-N3WZ-Sn5I-mLjB-j4Aw-8XPP-3ZXjSl" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "xfs", "label": "", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "82a26900-2b36-49b9-bc49-f4d2f0bb14b8" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:20 Sunday 09 June 2024 04:19:03 +0000 (0:00:00.175) 0:04:00.381 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.004740", "end": "2024-06-09 04:19:03.183158", "rc": 0, "start": "2024-06-09 04:19:03.178418" } STDOUT: # # /etc/fstab # 
Created by anaconda on Tue May 28 13:15:44 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=82a26900-2b36-49b9-bc49-f4d2f0bb14b8 / xfs defaults 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:25 Sunday 09 June 2024 04:19:03 +0000 (0:00:00.171) 0:04:00.553 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002870", "end": "2024-06-09 04:19:03.353466", "failed_when_result": false, "rc": 0, "start": "2024-06-09 04:19:03.350596" } STDOUT: luks-25f92b54-a245-4387-8b24-afa23facba1b /dev/mapper/foo-test1 - TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:34 Sunday 09 June 2024 04:19:03 +0000 (0:00:00.173) 0:04:00.726 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml for sut TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:5 Sunday 09 June 2024 04:19:03 +0000 (0:00:00.025) 0:04:00.752 *********** ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:18 Sunday 09 June 2024 04:19:03 +0000 (0:00:00.011) 0:04:00.764 *********** ok: [sut] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.030269", "end": "2024-06-09 04:19:03.594540", "rc": 0, "start": "2024-06-09 04:19:03.564271" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:24 Sunday 09 June 2024 04:19:03 +0000 (0:00:00.203) 0:04:00.967 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:34 Sunday 09 June 2024 04:19:03 +0000 (0:00:00.018) 0:04:00.986 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml for sut 
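For reference, the pool and volume settings echoed in the blivet output above (pool "foo" on disk sda, a 4g xfs volume "test1" encrypted with LUKS1 and mounted at /opt/test1) correspond to a storage_pools specification along these lines. This is a minimal sketch reconstructed from the reported values, not the literal task from tests_luks.yml, which is not shown in this excerpt; the passphrase value is a hypothetical placeholder, since the real test secret is never echoed in the log.

    - name: Preserve encryption settings on the existing LVM volume (sketch)
      include_role:
        name: linux-system-roles.storage
      vars:
        storage_pools:
          - name: foo
            disks:
              - sda
            volumes:
              - name: test1
                size: 4g
                fs_type: xfs
                mount_point: /opt/test1
                encryption: true
                encryption_luks_version: luks1
                # hypothetical placeholder; the actual test passphrase is not shown in this log
                encryption_password: CHANGE_ME

With these variables the role reports no actions and "changed": false, as seen above, because the existing LUKS1 layer on /dev/mapper/foo-test1 already matches the requested state; the remaining tasks in the transcript only verify that fstab, crypttab, and the LUKS header were left intact.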
included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-volumes.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:2 Sunday 09 June 2024 04:19:03 +0000 (0:00:00.025) 0:04:01.012 *********** ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:13 Sunday 09 June 2024 04:19:03 +0000 (0:00:00.019) 0:04:01.031 *********** ok: [sut] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:22 Sunday 09 June 2024 04:19:03 +0000 (0:00:00.173) 0:04:01.205 *********** ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:27 Sunday 09 June 2024 04:19:03 +0000 (0:00:00.015) 0:04:01.220 *********** ok: [sut] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:33 Sunday 09 June 2024 04:19:03 +0000 (0:00:00.016) 0:04:01.236 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:42 Sunday 09 June 2024 04:19:03 +0000 (0:00:00.017) 0:04:01.253 *********** ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:48 Sunday 09 June 2024 04:19:03 +0000 (0:00:00.015) 0:04:01.269 *********** ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:54 Sunday 09 June 2024 04:19:03 +0000 (0:00:00.016) 0:04:01.285 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:59 Sunday 09 June 2024 04:19:03 +0000 (0:00:00.012) 0:04:01.298 *********** ok: [sut] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:73 Sunday 09 June 2024 04:19:03 +0000 (0:00:00.019) 0:04:01.318 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:8 Sunday 
09 June 2024 04:19:03 +0000 (0:00:00.022) 0:04:01.341 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:14 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.014) 0:04:01.355 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:21 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.013) 0:04:01.368 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:28 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:01.380 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:35 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:01.393 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:45 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:01.406 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:54 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:01.419 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:64 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.014) 0:04:01.433 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:74 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:01.445 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:85 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:01.458 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:95 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:01.471 *********** ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:76 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.011) 0:04:01.483 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-lvmraid.yml:2 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.025) 0:04:01.508 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml for sut TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:8 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.024) 0:04:01.533 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:16 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:01.545 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:21 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:01.558 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:29 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.045) 0:04:01.604 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:34 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.014) 0:04:01.618 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:40 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:01.631 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:46 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:01.644 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:79 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.013) 0:04:01.657 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-thin.yml:2 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.025) 0:04:01.683 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-thin.yml for sut TASK [Get information about thinpool] ****************************************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-thin.yml:8 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.024) 0:04:01.707 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-thin.yml:16 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.013) 0:04:01.721 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-thin.yml:23 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:01.734 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-thin.yml:27 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:01.747 *********** ok: [sut] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:82 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:01.759 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:5 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.025) 0:04:01.785 *********** ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:13 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.015) 0:04:01.800 *********** skipping: [sut] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:20 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.018) 0:04:01.819 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml for sut TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:2 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.023) 0:04:01.842 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:9 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.015) 0:04:01.858 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:18 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.015) 0:04:01.873 *********** skipping: 
[sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:27 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.014) 0:04:01.887 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:37 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:01.900 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:47 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:01.913 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:27 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.011) 0:04:01.925 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:85 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.011) 0:04:01.936 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-vdo.yml for sut TASK [Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-vdo.yml:2 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.027) 0:04:01.964 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml for sut TASK [Get information about VDO deduplication] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:9 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.025) 0:04:01.989 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:16 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.013) 0:04:02.002 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:22 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:02.015 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:28 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.013) 0:04:02.029 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:35 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 
0:04:02.041 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:41 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:02.054 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:47 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:02.067 *********** ok: [sut] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:88 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.011) 0:04:02.079 *********** ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-volumes.yml:3 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.011) 0:04:02.090 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml for sut TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:2 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.026) 0:04:02.117 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:21 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.016) 0:04:02.134 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml for sut TASK [Get expected mount device based on device type] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:7 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.056) 0:04:02.190 *********** ok: [sut] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:16 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.016) 0:04:02.206 *********** ok: [sut] => { "ansible_facts": { "storage_test_mount_device_matches": [ { "block_available": 1014697, 
"block_size": 4096, "block_total": 1030144, "block_used": 15447, "device": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "fstype": "xfs", "inode_available": 2093053, "inode_total": 2093056, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 4156198912, "size_total": 4219469824, "uuid": "b490495f-38b8-4761-84a8-8d2e9c2b39a7" } ], "storage_test_mount_expected_match_count": "1", "storage_test_mount_point_matches": [ { "block_available": 1014697, "block_size": 4096, "block_total": 1030144, "block_used": 15447, "device": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "fstype": "xfs", "inode_available": 2093053, "inode_total": 2093056, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 4156198912, "size_total": 4219469824, "uuid": "b490495f-38b8-4761-84a8-8d2e9c2b39a7" } ], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:38 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.019) 0:04:02.226 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:51 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:02.238 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the current mount state by mount point] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:63 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.016) 0:04:02.255 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:71 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.014) 0:04:02.270 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:83 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:02.282 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:95 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.013) 0:04:02.296 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the mount fs type] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:110 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:02.309 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:122 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.015) 0:04:02.325 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] 
******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:128 Sunday 09 June 2024 04:19:04 +0000 (0:00:00.012) 0:04:02.337 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:134 Sunday 09 June 2024 04:19:05 +0000 (0:00:00.012) 0:04:02.350 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:146 Sunday 09 June 2024 04:19:05 +0000 (0:00:00.018) 0:04:02.368 *********** ok: [sut] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:2 Sunday 09 June 2024 04:19:05 +0000 (0:00:00.054) 0:04:02.423 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:40 Sunday 09 June 2024 04:19:05 +0000 (0:00:00.027) 0:04:02.450 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:48 Sunday 09 June 2024 04:19:05 +0000 (0:00:00.016) 0:04:02.467 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:58 Sunday 09 June 2024 04:19:05 +0000 (0:00:00.016) 0:04:02.484 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:71 Sunday 09 June 2024 04:19:05 +0000 (0:00:00.013) 0:04:02.497 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:3 Sunday 09 June 2024 
04:19:05 +0000 (0:00:00.012) 0:04:02.509 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:12 Sunday 09 June 2024 04:19:05 +0000 (0:00:00.020) 0:04:02.530 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:3 Sunday 09 June 2024 04:19:05 +0000 (0:00:00.019) 0:04:02.549 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906732.0056553, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1717906725.0186324, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 1546, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1717906725.0186324, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:9 Sunday 09 June 2024 04:19:05 +0000 (0:00:00.184) 0:04:02.733 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:16 Sunday 09 June 2024 04:19:05 +0000 (0:00:00.018) 0:04:02.752 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:24 Sunday 09 June 2024 04:19:05 +0000 (0:00:00.012) 0:04:02.764 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:30 Sunday 09 June 2024 04:19:05 +0000 (0:00:00.014) 0:04:02.779 *********** ok: [sut] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:34 Sunday 09 June 2024 04:19:05 +0000 (0:00:00.014) 0:04:02.793 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:39 Sunday 09 June 2024 04:19:05 +0000 (0:00:00.012) 0:04:02.806 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:3 Sunday 09 June 2024 04:19:05 +0000 (0:00:00.014) 0:04:02.821 *********** ok: [sut] => { "changed": false, 
"stat": { "atime": 1717906740.1566844, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1717906725.3336334, "dev": 5, "device_type": 64769, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 1588, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1717906725.3336334, "nlink": 1, "path": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:10 Sunday 09 June 2024 04:19:05 +0000 (0:00:00.187) 0:04:03.008 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:17 Sunday 09 June 2024 04:19:06 +0000 (0:00:00.877) 0:04:03.885 *********** ok: [sut] => { "changed": false, "cmd": [ "cryptsetup", "luksDump", "/dev/mapper/foo-test1" ], "delta": "0:00:00.006239", "end": "2024-06-09 04:19:06.695438", "rc": 0, "start": "2024-06-09 04:19:06.689199" } STDOUT: LUKS header information for /dev/mapper/foo-test1 Version: 1 Cipher name: aes Cipher mode: xts-plain64 Hash spec: sha256 Payload offset: 16384 MK bits: 512 MK digest: 33 d7 e6 dc c4 33 3c 1d 1e 9c 84 70 fb b3 82 54 d5 9e 49 5c MK salt: 57 95 47 9d 5d 9c b2 16 0b 68 c3 e5 62 39 0d 1a fd ec 68 ab 81 6f da c6 ff b8 21 1d b9 0c f2 2f MK iterations: 103206 UUID: 25f92b54-a245-4387-8b24-afa23facba1b Key Slot 0: ENABLED Iterations: 1659138 Salt: 4f 34 2b c5 df f9 b4 b1 d2 d3 d2 c2 46 11 1d a3 cb 98 a1 7f 68 47 b9 0f a3 a5 b8 f8 23 62 b1 2e Key material offset: 8 AF stripes: 4000 Key Slot 1: DISABLED Key Slot 2: DISABLED Key Slot 3: DISABLED Key Slot 4: DISABLED Key Slot 5: DISABLED Key Slot 6: DISABLED Key Slot 7: DISABLED TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:23 Sunday 09 June 2024 04:19:06 +0000 (0:00:00.181) 0:04:04.067 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:32 Sunday 09 June 2024 04:19:06 +0000 (0:00:00.018) 0:04:04.085 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:45 Sunday 09 June 2024 04:19:06 +0000 (0:00:00.018) 0:04:04.104 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:51 Sunday 09 June 2024 04:19:06 +0000 (0:00:00.018) 0:04:04.122 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check LUKS version] 
****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:56 Sunday 09 June 2024 04:19:06 +0000 (0:00:00.016) 0:04:04.139 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:69 Sunday 09 June 2024 04:19:06 +0000 (0:00:00.018) 0:04:04.158 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:81 Sunday 09 June 2024 04:19:06 +0000 (0:00:00.013) 0:04:04.171 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:94 Sunday 09 June 2024 04:19:06 +0000 (0:00:00.014) 0:04:04.186 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [ "luks-25f92b54-a245-4387-8b24-afa23facba1b /dev/mapper/foo-test1 -" ], "_storage_test_expected_crypttab_entries": "1", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:106 Sunday 09 June 2024 04:19:06 +0000 (0:00:00.017) 0:04:04.203 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:114 Sunday 09 June 2024 04:19:06 +0000 (0:00:00.015) 0:04:04.219 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:122 Sunday 09 June 2024 04:19:06 +0000 (0:00:00.016) 0:04:04.235 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:131 Sunday 09 June 2024 04:19:06 +0000 (0:00:00.016) 0:04:04.251 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:140 Sunday 09 June 2024 04:19:06 +0000 (0:00:00.016) 0:04:04.268 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:8 Sunday 09 June 2024 04:19:06 +0000 (0:00:00.013) 0:04:04.281 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:14 Sunday 09 June 
2024 04:19:06 +0000 (0:00:00.012) 0:04:04.294 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:21 Sunday 09 June 2024 04:19:06 +0000 (0:00:00.013) 0:04:04.307 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:28 Sunday 09 June 2024 04:19:06 +0000 (0:00:00.012) 0:04:04.320 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:35 Sunday 09 June 2024 04:19:06 +0000 (0:00:00.013) 0:04:04.333 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:45 Sunday 09 June 2024 04:19:06 +0000 (0:00:00.012) 0:04:04.346 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:54 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.014) 0:04:04.360 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:63 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.012) 0:04:04.373 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:72 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.012) 0:04:04.385 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:81 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.012) 0:04:04.398 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:3 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.012) 0:04:04.411 *********** ok: [sut] => { "bytes": 4294967296, "changed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:11 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.176) 0:04:04.587 *********** ok: [sut] => { "bytes": 4294967296, "changed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } TASK [Establish base value for expected size] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:20 Sunday 09 June 2024 04:19:07 +0000 
(0:00:00.173) 0:04:04.761 *********** ok: [sut] => { "ansible_facts": { "storage_test_expected_size": "4294967296" }, "changed": false } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:28 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.018) 0:04:04.779 *********** ok: [sut] => { "storage_test_expected_size": "4294967296" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:32 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.014) 0:04:04.793 *********** ok: [sut] => { "bytes": 10726680821, "changed": false, "lvm": "9g", "parted": "9GiB", "size": "9 GiB" } TASK [Show test pool] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:46 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.169) 0:04:04.963 *********** skipping: [sut] => {} TASK [Show test blockinfo] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:50 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.015) 0:04:04.978 *********** skipping: [sut] => {} TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:54 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.017) 0:04:04.996 *********** skipping: [sut] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:58 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.015) 0:04:05.011 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:68 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.015) 0:04:05.027 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:72 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.013) 0:04:05.040 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:77 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.011) 0:04:05.052 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:83 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.011) 0:04:05.064 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:88 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.013) 0:04:05.077 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:96 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.012) 0:04:05.090 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:104 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.012) 0:04:05.103 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:109 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.012) 0:04:05.116 *********** skipping: [sut] => {} TASK [Show volume thin pool size] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:113 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.012) 0:04:05.129 *********** skipping: [sut] => {} TASK [Show test volume size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:117 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.012) 0:04:05.142 *********** skipping: [sut] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:121 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.014) 0:04:05.156 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:129 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.012) 0:04:05.169 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:138 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.012) 0:04:05.181 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:142 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.011) 0:04:05.193 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:150 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.011) 0:04:05.205 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:156 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.011) 0:04:05.217 *********** ok: [sut] => { "storage_test_actual_size": { "bytes": 4294967296, "changed": false, "failed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:160 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.015) 0:04:05.232 *********** ok: [sut] 
=> { "storage_test_expected_size": "4294967296" } TASK [Assert expected size is actual size] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:164 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.014) 0:04:05.246 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:5 Sunday 09 June 2024 04:19:07 +0000 (0:00:00.018) 0:04:05.265 *********** ok: [sut] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.032193", "end": "2024-06-09 04:19:08.101123", "rc": 0, "start": "2024-06-09 04:19:08.068930" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:13 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.206) 0:04:05.472 *********** ok: [sut] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:18 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.017) 0:04:05.489 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:27 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.017) 0:04:05.507 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:35 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.014) 0:04:05.522 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:41 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.014) 0:04:05.536 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:47 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.014) 0:04:05.550 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:27 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.048) 0:04:05.599 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:44 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.013) 0:04:05.612 *********** TASK [Clean up variable namespace] ********************************************* task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:54 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.011) 0:04:05.624 *********** ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Create a file] *********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/create-test-file.yml:12 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.011) 0:04:05.636 *********** changed: [sut] => { "changed": true, "dest": "/opt/test1/quux", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "secontext": "unconfined_u:object_r:unlabeled_t:s0", "size": 0, "state": "file", "uid": 0 } TASK [Test for correct handling of safe_mode] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:406 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.181) 0:04:05.817 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml for sut TASK [Store global variable value copy] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:4 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.022) 0:04:05.839 *********** ok: [sut] => { "ansible_facts": { "storage_pools_global": [], "storage_safe_mode_global": true, "storage_volumes_global": [] }, "changed": false } TASK [Verify role raises correct error] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:10 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.016) 0:04:05.856 *********** TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.017) 0:04:05.874 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.017) 0:04:05.891 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.015) 0:04:05.906 *********** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", 
"xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.035) 0:04:05.942 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set flag to indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.013) 0:04:05.955 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.012) 0:04:05.968 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.011) 0:04:05.980 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.011) 0:04:05.992 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:19:08 +0000 (0:00:00.027) 0:04:06.019 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:19:09 +0000 (0:00:00.855) 0:04:06.874 *********** ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo", "type": "lvm", "volumes": [ { "encryption": false, "encryption_password": "yabbadabbadoo", "mount_point": "/opt/test1", "name": "test1", "size": "4g" } ] } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:19:09 +0000 (0:00:00.016) 0:04:06.891 *********** ok: [sut] => { "storage_volumes": [] } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:19:09 +0000 (0:00:00.016) 0:04:06.908 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": 
[], "mounts": [], "packages": [ "lvm2" ], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:19:10 +0000 (0:00:01.209) 0:04:08.117 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:19:10 +0000 (0:00:00.025) 0:04:08.142 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 04:19:10 +0000 (0:00:00.012) 0:04:08.154 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:19:10 +0000 (0:00:00.012) 0:04:08.167 *********** TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:19:10 +0000 (0:00:00.011) 0:04:08.179 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kpartx lvm2 TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:19:11 +0000 (0:00:00.866) 0:04:09.045 *********** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": 
"cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": 
"dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": 
"systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": 
"inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service": { "name": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "source": "systemd", "state": "stopped", "status": "generated" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": 
"systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:19:13 +0000 (0:00:01.484) 0:04:10.529 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [ "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service" ] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:19:13 +0000 (0:00:00.020) 0:04:10.550 *********** changed: [sut] => (item=systemd-cryptsetup@luks\x2d25f92b54\x2da245\x2d4387\x2d8b24\x2dafa23facba1b.service) => { "ansible_loop_var": "item", "changed": true, "item": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "name": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": 
"systemd-journald.socket \"system-systemd\\\\x2dcryptsetup.slice\" cryptsetup-pre.target \"dev-mapper-foo\\\\x2dtest1.device\" systemd-udevd-kernel.socket", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "cryptsetup.target umount.target \"blockdev@dev-mapper-luks\\\\x2d25f92b54\\\\x2da245\\\\x2d4387\\\\x2d8b24\\\\x2dafa23facba1b.target\"", "BindsTo": "\"dev-mapper-foo\\\\x2dtest1.device\"", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "umount.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "no", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Cryptography Setup for luks-25f92b54-a245-4387-8b24-afa23facba1b", "DevicePolicy": "auto", "Documentation": "\"man:crypttab(5)\" \"man:systemd-cryptsetup-generator(8)\" \"man:systemd-cryptsetup@.service(8)\"", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup attach luks-25f92b54-a245-4387-8b24-afa23facba1b /dev/mapper/foo-test1 - ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup attach luks-25f92b54-a245-4387-8b24-afa23facba1b /dev/mapper/foo-test1 - ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup detach luks-25f92b54-a245-4387-8b24-afa23facba1b ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup detach luks-25f92b54-a245-4387-8b24-afa23facba1b ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "FreezerState": "running", "GID": "[not set]", 
"GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "IgnoreOnIsolate": "yes", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "shared", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13972", "LimitNPROCSoft": "13972", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13972", "LimitSIGPENDINGSoft": "13972", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"systemd-cryptsetup@luks\\\\x2d25f92b54\\\\x2da245\\\\x2d4387\\\\x2d8b24\\\\x2dafa23facba1b.service\"", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "500", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "RequiredBy": "cryptsetup.target \"dev-mapper-luks\\\\x2d25f92b54\\\\x2da245\\\\x2d4387\\\\x2d8b24\\\\x2dafa23facba1b.device\"", "Requires": "\"system-systemd\\\\x2dcryptsetup.slice\"", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": 
"no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system-systemd\\x2dcryptsetup.slice", "SourcePath": "/etc/crypttab", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sun 2024-06-09 04:19:00 UTC", "StateChangeTimestampMonotonic": "681384032", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22356", "TimeoutAbortUSec": "infinity", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "infinity", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "\"blockdev@dev-mapper-luks\\\\x2d25f92b54\\\\x2da245\\\\x2d4387\\\\x2d8b24\\\\x2dafa23facba1b.target\"", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:19:13 +0000 (0:00:00.527) 0:04:11.077 *********** fatal: [sut]: FAILED! => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } MSG: cannot remove existing formatting on device 'luks-25f92b54-a245-4387-8b24-afa23facba1b' in safe mode due to encryption removal TASK [linux-system-roles.storage : Failed message] ***************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:106 Sunday 09 June 2024 04:19:14 +0000 (0:00:01.214) 0:04:12.291 *********** fatal: [sut]: FAILED! 
=> { "changed": false } MSG: {'msg': "cannot remove existing formatting on device 'luks-25f92b54-a245-4387-8b24-afa23facba1b' in safe mode due to encryption removal", 'changed': False, 'actions': [], 'leaves': [], 'mounts': [], 'crypts': [], 'pools': [], 'volumes': [], 'packages': [], 'failed': True, 'invocation': {'module_args': {'pools': [{'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': 0, 'encryption_luks_version': 'luks1', 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None}]}], 'volumes': [], 'use_partitions': None, 'disklabel_type': None, 'pool_defaults': {'state': 'present', 'type': 'lvm', 'disks': [], 'volumes': [], 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'shared': False}, 'volume_defaults': {'state': 'present', 'type': 'lvm', 'size': 0, 'disks': [], 'fs_type': 'xfs', 'fs_label': '', 'fs_create_options': '', 'fs_overwrite_existing': True, 'mount_point': '', 'mount_options': 'defaults', 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_stripe_size': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': []}, 'safe_mode': True, 'packages_only': False, 'diskvolume_mkfs_option_map': {}}}, '_ansible_no_log': False} TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:19:14 +0000 (0:00:00.017) 0:04:12.308 *********** changed: [sut] => (item=systemd-cryptsetup@luks\x2d25f92b54\x2da245\x2d4387\x2d8b24\x2dafa23facba1b.service) => { "ansible_loop_var": "item", "changed": true, "item": 
"systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "name": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "no", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "DevicePolicy": "auto", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/etc/systemd/system/systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "463069184", 
"LimitMEMLOCKSoft": "463069184", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "1073741816", "LimitNOFILESoft": "1073741816", "LimitNPROC": "13972", "LimitNPROCSoft": "13972", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13972", "LimitSIGPENDINGSoft": "13972", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadError": "org.freedesktop.systemd1.UnitMasked \"Unit systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service is masked.\"", "LoadState": "masked", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"systemd-cryptsetup@luks\\\\x2d25f92b54\\\\x2da245\\\\x2d4387\\\\x2d8b24\\\\x2dafa23facba1b.service\"", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "RequiredBy": "cryptsetup.target \"dev-mapper-luks\\\\x2d25f92b54\\\\x2da245\\\\x2d4387\\\\x2d8b24\\\\x2dafa23facba1b.device\"", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "inherit", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sun 2024-06-09 04:19:00 UTC", "StateChangeTimestampMonotonic": "681384032", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22356", "TimeoutAbortUSec": 
"1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "masked", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [Check that we failed in the role] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:29 Sunday 09 June 2024 04:19:15 +0000 (0:00:00.521) 0:04:12.830 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the blivet output and error message are correct] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:34 Sunday 09 June 2024 04:19:15 +0000 (0:00:00.016) 0:04:12.846 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify correct exception or error message] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:45 Sunday 09 June 2024 04:19:15 +0000 (0:00:00.020) 0:04:12.866 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Stat the file] *********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-data-preservation.yml:11 Sunday 09 June 2024 04:19:15 +0000 (0:00:00.013) 0:04:12.880 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906748.4447153, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1717906748.4447153, "dev": 64769, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0644", "mtime": 1717906748.4447153, "nlink": 1, "path": "/opt/test1/quux", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 0, "uid": 0, "version": "3277495385", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Assert file presence] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-data-preservation.yml:16 Sunday 09 June 2024 04:19:15 +0000 (0:00:00.179) 0:04:13.059 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Remove the encryption layer] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:429 Sunday 09 June 2024 04:19:15 +0000 (0:00:00.016) 0:04:13.076 *********** TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Sunday 09 June 2024 04:19:15 +0000 (0:00:00.031) 0:04:13.107 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Sunday 09 June 2024 04:19:15 +0000 (0:00:00.018) 0:04:13.126 *********** 
skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Sunday 09 June 2024 04:19:15 +0000 (0:00:00.015) 0:04:13.141 *********** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:19:15 +0000 (0:00:00.032) 0:04:13.174 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set flag to indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:19:15 +0000 (0:00:00.012) 0:04:13.186 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:19:15 +0000 (0:00:00.014) 0:04:13.201 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Sunday 09 June 2024 04:19:15 +0000 (0:00:00.011) 0:04:13.213 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:19:15 +0000 (0:00:00.011) 0:04:13.224 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:19:15 +0000 (0:00:00.025) 0:04:13.250 *********** ok: [sut] => { "changed": 
false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:19:16 +0000 (0:00:00.867) 0:04:14.117 *********** ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo", "type": "lvm", "volumes": [ { "encryption": false, "encryption_password": "yabbadabbadoo", "mount_point": "/opt/test1", "name": "test1", "size": "4g" } ] } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:19:16 +0000 (0:00:00.015) 0:04:14.133 *********** ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:19:16 +0000 (0:00:00.014) 0:04:14.147 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "lvm2" ], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:19:17 +0000 (0:00:01.195) 0:04:15.343 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:19:18 +0000 (0:00:00.024) 0:04:15.367 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 04:19:18 +0000 (0:00:00.014) 0:04:15.381 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:19:18 +0000 (0:00:00.013) 0:04:15.395 *********** TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:19:18 +0000 (0:00:00.013) 0:04:15.408 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kpartx lvm2 TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:19:18 +0000 (0:00:00.872) 0:04:16.281 *********** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { 
"name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": 
"disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": 
"inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, 
"sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service": { "name": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "source": "systemd", "state": "stopped", "status": "generated" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": 
"systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:19:20 +0000 (0:00:01.486) 0:04:17.768 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [ "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service" ] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:19:20 +0000 (0:00:00.021) 0:04:17.789 *********** changed: [sut] => (item=systemd-cryptsetup@luks\x2d25f92b54\x2da245\x2d4387\x2d8b24\x2dafa23facba1b.service) => { "ansible_loop_var": "item", "changed": true, "item": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "name": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "\"system-systemd\\\\x2dcryptsetup.slice\" systemd-journald.socket systemd-udevd-kernel.socket \"dev-mapper-foo\\\\x2dtest1.device\" cryptsetup-pre.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "\"blockdev@dev-mapper-luks\\\\x2d25f92b54\\\\x2da245\\\\x2d4387\\\\x2d8b24\\\\x2dafa23facba1b.target\" umount.target cryptsetup.target", "BindsTo": "\"dev-mapper-foo\\\\x2dtest1.device\"", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "umount.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "no", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Cryptography Setup for luks-25f92b54-a245-4387-8b24-afa23facba1b", "DevicePolicy": "auto", "Documentation": "\"man:crypttab(5)\" \"man:systemd-cryptsetup-generator(8)\" \"man:systemd-cryptsetup@.service(8)\"", "DynamicUser": "no", "ExecMainCode": "0", 
"ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup attach luks-25f92b54-a245-4387-8b24-afa23facba1b /dev/mapper/foo-test1 - ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup attach luks-25f92b54-a245-4387-8b24-afa23facba1b /dev/mapper/foo-test1 - ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup detach luks-25f92b54-a245-4387-8b24-afa23facba1b ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup detach luks-25f92b54-a245-4387-8b24-afa23facba1b ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "IgnoreOnIsolate": "yes", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "shared", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13972", "LimitNPROCSoft": "13972", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13972", "LimitSIGPENDINGSoft": "13972", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", 
"MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"systemd-cryptsetup@luks\\\\x2d25f92b54\\\\x2da245\\\\x2d4387\\\\x2d8b24\\\\x2dafa23facba1b.service\"", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "500", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "RequiredBy": "\"dev-mapper-luks\\\\x2d25f92b54\\\\x2da245\\\\x2d4387\\\\x2d8b24\\\\x2dafa23facba1b.device\" cryptsetup.target", "Requires": "\"system-systemd\\\\x2dcryptsetup.slice\"", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system-systemd\\x2dcryptsetup.slice", "SourcePath": "/etc/crypttab", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sun 2024-06-09 04:19:00 UTC", "StateChangeTimestampMonotonic": "681384032", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22356", "TimeoutAbortUSec": "infinity", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "infinity", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "\"blockdev@dev-mapper-luks\\\\x2d25f92b54\\\\x2da245\\\\x2d4387\\\\x2d8b24\\\\x2dafa23facba1b.target\"", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:19:20 +0000 (0:00:00.515) 0:04:18.304 *********** changed: [sut] => { "actions": [ { "action": "destroy format", "device": 
"/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/foo-test1", "fs_type": "luks" }, { "action": "create format", "device": "/dev/mapper/foo-test1", "fs_type": "xfs" } ], "changed": true, "crypts": [ { "backing_device": "/dev/mapper/foo-test1", "name": "luks-25f92b54-a245-4387-8b24-afa23facba1b", "password": "-", "state": "absent" } ], "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "state": "absent" }, { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "xfsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks1", "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:98 Sunday 09 June 2024 04:19:22 +0000 (0:00:01.965) 0:04:20.270 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:19:22 +0000 (0:00:00.013) 0:04:20.283 *********** changed: [sut] => (item=systemd-cryptsetup@luks\x2d25f92b54\x2da245\x2d4387\x2d8b24\x2dafa23facba1b.service) => { "ansible_loop_var": "item", "changed": true, "item": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "name": 
"systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "no", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "DevicePolicy": "auto", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/etc/systemd/system/systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "463069184", "LimitMEMLOCKSoft": "463069184", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": 
"0", "LimitNICESoft": "0", "LimitNOFILE": "1073741816", "LimitNOFILESoft": "1073741816", "LimitNPROC": "13972", "LimitNPROCSoft": "13972", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13972", "LimitSIGPENDINGSoft": "13972", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadError": "org.freedesktop.systemd1.UnitMasked \"Unit systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service is masked.\"", "LoadState": "masked", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"systemd-cryptsetup@luks\\\\x2d25f92b54\\\\x2da245\\\\x2d4387\\\\x2d8b24\\\\x2dafa23facba1b.service\"", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "RequiredBy": "cryptsetup.target \"dev-mapper-luks\\\\x2d25f92b54\\\\x2da245\\\\x2d4387\\\\x2d8b24\\\\x2dafa23facba1b.device\"", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "inherit", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sun 2024-06-09 04:19:00 UTC", "StateChangeTimestampMonotonic": "681384032", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22356", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": 
"1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "masked", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:116 Sunday 09 June 2024 04:19:23 +0000 (0:00:00.509) 0:04:20.793 *********** ok: [sut] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/foo-test1", "fs_type": "luks" }, { "action": "create format", "device": "/dev/mapper/foo-test1", "fs_type": "xfs" } ], "changed": true, "crypts": [ { "backing_device": "/dev/mapper/foo-test1", "name": "luks-25f92b54-a245-4387-8b24-afa23facba1b", "password": "-", "state": "absent" } ], "failed": false, "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "state": "absent" }, { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "xfsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks1", "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:125 Sunday 09 June 2024 04:19:23 +0000 (0:00:00.018) 0:04:20.811 *********** 
ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks1", "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:129 Sunday 09 June 2024 04:19:23 +0000 (0:00:00.018) 0:04:20.829 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:145 Sunday 09 June 2024 04:19:23 +0000 (0:00:00.017) 0:04:20.847 *********** changed: [sut] => (item={'src': '/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'xfs'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b", "state": "absent" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/luks-25f92b54-a245-4387-8b24-afa23facba1b" } TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:157 Sunday 09 June 2024 04:19:23 +0000 (0:00:00.184) 0:04:21.032 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:162 Sunday 09 June 2024 04:19:24 +0000 (0:00:00.488) 0:04:21.521 *********** changed: [sut] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": 
"mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Sunday 09 June 2024 04:19:24 +0000 (0:00:00.188) 0:04:21.709 *********** skipping: [sut] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:189 Sunday 09 June 2024 04:19:24 +0000 (0:00:00.018) 0:04:21.728 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:197 Sunday 09 June 2024 04:19:24 +0000 (0:00:00.479) 0:04:22.207 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906728.535644, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "f5fe5989f33339df6109621bead2d7d7e3537c0f", "ctime": 1717906727.36164, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 234881346, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0600", "mtime": 1717906727.36164, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 66, "uid": 0, "version": "3566853825", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:202 Sunday 09 June 2024 04:19:25 +0000 (0:00:00.182) 0:04:22.389 *********** changed: [sut] => (item={'backing_device': '/dev/mapper/foo-test1', 'name': 'luks-25f92b54-a245-4387-8b24-afa23facba1b', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/mapper/foo-test1", "name": "luks-25f92b54-a245-4387-8b24-afa23facba1b", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:224 Sunday 09 June 2024 04:19:25 +0000 (0:00:00.183) 0:04:22.573 *********** ok: [sut] TASK [Verify role 
results] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:445 Sunday 09 June 2024 04:19:25 +0000 (0:00:00.593) 0:04:23.167 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:2 Sunday 09 June 2024 04:19:25 +0000 (0:00:00.025) 0:04:23.193 *********** ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks1", "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:7 Sunday 09 June 2024 04:19:25 +0000 (0:00:00.018) 0:04:23.211 *********** skipping: [sut] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:15 Sunday 09 June 2024 04:19:25 +0000 (0:00:00.013) 0:04:23.224 *********** ok: [sut] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "xfs", "label": "", "name": "/dev/mapper/foo-test1", "size": "4G", "type": "lvm", "uuid": "89e3dde7-2798-4b00-a713-a48782cceb2f" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "1agGsq-N3WZ-Sn5I-mLjB-j4Aw-8XPP-3ZXjSl" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "xfs", "label": "", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "82a26900-2b36-49b9-bc49-f4d2f0bb14b8" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:20 Sunday 09 June 2024 04:19:26 +0000 (0:00:00.176) 0:04:23.400 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002534", "end": "2024-06-09 04:19:26.202285", "rc": 0, "start": "2024-06-09 04:19:26.199751" } STDOUT: # # /etc/fstab # Created by anaconda on Tue May 28 13:15:44 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=82a26900-2b36-49b9-bc49-f4d2f0bb14b8 / xfs defaults 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/foo-test1 /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:25 Sunday 09 June 2024 04:19:26 +0000 (0:00:00.173) 0:04:23.574 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002535", "end": "2024-06-09 04:19:26.376279", "failed_when_result": false, "rc": 0, "start": "2024-06-09 04:19:26.373744" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:34 Sunday 09 June 2024 04:19:26 +0000 (0:00:00.176) 0:04:23.750 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml for sut TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:5 Sunday 09 June 2024 04:19:26 +0000 (0:00:00.027) 0:04:23.778 *********** ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:18 Sunday 09 June 2024 04:19:26 +0000 (0:00:00.013) 0:04:23.792 *********** ok: [sut] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.040184", "end": "2024-06-09 04:19:26.634596", "rc": 0, "start": "2024-06-09 04:19:26.594412" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:24 Sunday 09 June 2024 04:19:26 +0000 (0:00:00.215) 0:04:24.008 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:34 Sunday 09 June 2024 04:19:26 +0000 (0:00:00.019) 0:04:24.027 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-volumes.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:2 Sunday 09 June 2024 04:19:26 +0000 (0:00:00.026) 0:04:24.054 *********** ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ 
"/dev/sda" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:13 Sunday 09 June 2024 04:19:26 +0000 (0:00:00.019) 0:04:24.073 *********** ok: [sut] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:22 Sunday 09 June 2024 04:19:26 +0000 (0:00:00.177) 0:04:24.250 *********** ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:27 Sunday 09 June 2024 04:19:26 +0000 (0:00:00.015) 0:04:24.266 *********** ok: [sut] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:33 Sunday 09 June 2024 04:19:26 +0000 (0:00:00.016) 0:04:24.282 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:42 Sunday 09 June 2024 04:19:26 +0000 (0:00:00.017) 0:04:24.300 *********** ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:48 Sunday 09 June 2024 04:19:26 +0000 (0:00:00.015) 0:04:24.316 *********** ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:54 Sunday 09 June 2024 04:19:26 +0000 (0:00:00.015) 0:04:24.331 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:59 Sunday 09 June 2024 04:19:26 +0000 (0:00:00.015) 0:04:24.346 *********** ok: [sut] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:73 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.020) 0:04:24.367 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:8 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.024) 0:04:24.391 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:14 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.013) 0:04:24.405 *********** skipping: [sut] => { "changed": false, 
"skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:21 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.014) 0:04:24.419 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:28 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.057) 0:04:24.477 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:35 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.015) 0:04:24.492 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:45 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.013) 0:04:24.505 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:54 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.013) 0:04:24.519 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:64 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.013) 0:04:24.532 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:74 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.013) 0:04:24.546 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:85 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.015) 0:04:24.561 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:95 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.013) 0:04:24.574 *********** ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:76 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.013) 0:04:24.588 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-lvmraid.yml:2 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.026) 0:04:24.614 *********** included: 
/WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml for sut TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:8 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.028) 0:04:24.643 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:16 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.013) 0:04:24.657 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:21 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.013) 0:04:24.670 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:29 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.013) 0:04:24.683 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:34 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.013) 0:04:24.696 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:40 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.015) 0:04:24.712 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:46 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.014) 0:04:24.727 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:79 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.014) 0:04:24.741 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-thin.yml:2 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.024) 0:04:24.766 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-thin.yml for sut TASK [Get information about thinpool] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-thin.yml:8 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.026) 0:04:24.793 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-thin.yml:16 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.012) 0:04:24.806 *********** skipping: [sut] => { "changed": 
false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-thin.yml:23 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.012) 0:04:24.819 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-thin.yml:27 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.012) 0:04:24.832 *********** ok: [sut] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:82 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.011) 0:04:24.844 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:5 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.028) 0:04:24.872 *********** ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:13 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.015) 0:04:24.887 *********** skipping: [sut] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:20 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.015) 0:04:24.903 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml for sut TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:2 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.024) 0:04:24.927 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:9 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.015) 0:04:24.943 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:18 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.016) 0:04:24.959 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:27 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.012) 0:04:24.972 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:37 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.012) 0:04:24.984 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:47 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.014) 0:04:24.999 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:27 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.013) 0:04:25.012 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:85 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.012) 0:04:25.024 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-vdo.yml for sut TASK [Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-vdo.yml:2 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.026) 0:04:25.050 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml for sut TASK [Get information about VDO deduplication] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:9 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.027) 0:04:25.078 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:16 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.013) 0:04:25.091 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:22 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.012) 0:04:25.104 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:28 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.012) 0:04:25.117 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:35 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.012) 0:04:25.129 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:41 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.014) 0:04:25.144 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] 
**************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:47 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.013) 0:04:25.157 *********** ok: [sut] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:88 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.012) 0:04:25.169 *********** ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-volumes.yml:3 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.011) 0:04:25.180 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml for sut TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:2 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.024) 0:04:25.205 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:21 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.016) 0:04:25.221 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml for sut TASK [Get expected mount device based on device type] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:7 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.105) 0:04:25.326 *********** ok: [sut] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:16 Sunday 09 June 2024 04:19:27 +0000 (0:00:00.017) 0:04:25.344 *********** ok: [sut] => { "ansible_facts": { "storage_test_mount_device_matches": [ { "block_available": 1016730, "block_size": 4096, "block_total": 1032192, "block_used": 15462, "device": "/dev/mapper/foo-test1", "fstype": "xfs", "inode_available": 2097149, "inode_total": 2097152, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 4164526080, "size_total": 4227858432, "uuid": "89e3dde7-2798-4b00-a713-a48782cceb2f" } ], "storage_test_mount_expected_match_count": "1", "storage_test_mount_point_matches": [ { 
"block_available": 1016730, "block_size": 4096, "block_total": 1032192, "block_used": 15462, "device": "/dev/mapper/foo-test1", "fstype": "xfs", "inode_available": 2097149, "inode_total": 2097152, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 4164526080, "size_total": 4227858432, "uuid": "89e3dde7-2798-4b00-a713-a48782cceb2f" } ], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:38 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.021) 0:04:25.365 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:51 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.014) 0:04:25.379 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the current mount state by mount point] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:63 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.017) 0:04:25.397 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:71 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.015) 0:04:25.412 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:83 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.013) 0:04:25.425 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:95 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.012) 0:04:25.438 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the mount fs type] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:110 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.012) 0:04:25.450 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:122 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.019) 0:04:25.470 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:128 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.012) 0:04:25.483 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:134 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.012) 0:04:25.495 *********** skipping: [sut] 
=> { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:146 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.012) 0:04:25.508 *********** ok: [sut] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:2 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.013) 0:04:25.521 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:40 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.026) 0:04:25.548 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:48 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.018) 0:04:25.566 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:58 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.016) 0:04:25.582 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:71 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.014) 0:04:25.597 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:3 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.013) 0:04:25.611 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:12 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.021) 0:04:25.632 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:3 Sunday 09 June 2024 
04:19:28 +0000 (0:00:00.019) 0:04:25.652 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906762.8437688, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1717906762.8437688, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 1667, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1717906762.8437688, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:9 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.180) 0:04:25.833 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:16 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.018) 0:04:25.851 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:24 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.014) 0:04:25.865 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:30 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.016) 0:04:25.882 *********** ok: [sut] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:34 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.014) 0:04:25.896 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:39 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.012) 0:04:25.909 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:3 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.015) 0:04:25.924 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:10 Sunday 09 June 2024 04:19:28 +0000 (0:00:00.013) 0:04:25.937 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:17 Sunday 09 June 2024 04:19:29 
+0000 (0:00:00.859) 0:04:26.797 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:23 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.013) 0:04:26.810 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:32 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.013) 0:04:26.824 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:45 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.020) 0:04:26.845 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:51 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.015) 0:04:26.860 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:56 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.018) 0:04:26.879 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:69 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.013) 0:04:26.892 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:81 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.013) 0:04:26.906 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:94 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.013) 0:04:26.919 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:106 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.016) 0:04:26.935 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:114 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.015) 0:04:26.951 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:122 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.014) 0:04:26.965 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:131 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.013) 0:04:26.979 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:140 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.013) 0:04:26.992 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:8 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.011) 0:04:27.004 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:14 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.012) 0:04:27.017 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:21 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.012) 0:04:27.030 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:28 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.014) 0:04:27.044 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:35 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.012) 0:04:27.057 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:45 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.012) 0:04:27.070 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:54 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.012) 0:04:27.083 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:63 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.012) 0:04:27.096 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check 
RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:72 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.012) 0:04:27.108 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:81 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.014) 0:04:27.123 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:3 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.012) 0:04:27.136 *********** ok: [sut] => { "bytes": 4294967296, "changed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:11 Sunday 09 June 2024 04:19:29 +0000 (0:00:00.174) 0:04:27.310 *********** ok: [sut] => { "bytes": 4294967296, "changed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } TASK [Establish base value for expected size] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:20 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.174) 0:04:27.485 *********** ok: [sut] => { "ansible_facts": { "storage_test_expected_size": "4294967296" }, "changed": false } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:28 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.017) 0:04:27.503 *********** ok: [sut] => { "storage_test_expected_size": "4294967296" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:32 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.014) 0:04:27.517 *********** ok: [sut] => { "bytes": 10726680821, "changed": false, "lvm": "9g", "parted": "9GiB", "size": "9 GiB" } TASK [Show test pool] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:46 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.168) 0:04:27.686 *********** skipping: [sut] => {} TASK [Show test blockinfo] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:50 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.015) 0:04:27.701 *********** skipping: [sut] => {} TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:54 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.015) 0:04:27.717 *********** skipping: [sut] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:58 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.014) 0:04:27.732 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:68 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.013) 
0:04:27.745 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:72 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.013) 0:04:27.759 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:77 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.012) 0:04:27.772 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:83 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.012) 0:04:27.785 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:88 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.012) 0:04:27.798 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:96 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.011) 0:04:27.809 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:104 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.012) 0:04:27.822 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:109 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.014) 0:04:27.836 *********** skipping: [sut] => {} TASK [Show volume thin pool size] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:113 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.012) 0:04:27.849 *********** skipping: [sut] => {} TASK [Show test volume size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:117 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.012) 0:04:27.862 *********** skipping: [sut] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:121 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.013) 0:04:27.875 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:129 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.013) 0:04:27.888 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:138 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.013) 0:04:27.902 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:142 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.063) 0:04:27.965 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:150 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.014) 0:04:27.980 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:156 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.013) 0:04:27.993 *********** ok: [sut] => { "storage_test_actual_size": { "bytes": 4294967296, "changed": false, "failed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:160 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.015) 0:04:28.009 *********** ok: [sut] => { "storage_test_expected_size": "4294967296" } TASK [Assert expected size is actual size] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:164 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.013) 0:04:28.023 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:5 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.020) 0:04:28.044 *********** ok: [sut] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.032396", "end": "2024-06-09 04:19:30.885580", "rc": 0, "start": "2024-06-09 04:19:30.853184" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:13 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.215) 0:04:28.259 *********** ok: [sut] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:18 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.017) 0:04:28.277 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:27 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.017) 0:04:28.295 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:35 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.014) 0:04:28.309 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:41 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.014) 0:04:28.323 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:47 Sunday 09 June 2024 04:19:30 +0000 (0:00:00.014) 0:04:28.338 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:27 Sunday 09 June 2024 04:19:31 +0000 (0:00:00.016) 0:04:28.354 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:44 Sunday 09 June 2024 04:19:31 +0000 (0:00:00.013) 0:04:28.367 *********** TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:54 Sunday 09 June 2024 04:19:31 +0000 (0:00:00.011) 0:04:28.379 *********** ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Create a file] *********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/create-test-file.yml:12 Sunday 09 June 2024 04:19:31 +0000 (0:00:00.013) 0:04:28.392 *********** changed: [sut] => { "changed": true, "dest": "/opt/test1/quux", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "secontext": "unconfined_u:object_r:unlabeled_t:s0", "size": 0, "state": "file", "uid": 0 } TASK [Test for correct handling of safe_mode] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:451 Sunday 09 June 2024 04:19:31 +0000 (0:00:00.179) 0:04:28.572 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml for sut TASK [Store global variable value copy] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:4 Sunday 09 June 2024 04:19:31 +0000 (0:00:00.026) 0:04:28.598 *********** ok: [sut] => { "ansible_facts": { "storage_pools_global": [], "storage_safe_mode_global": true, "storage_volumes_global": [] }, "changed": false } TASK [Verify role raises correct error] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:10 Sunday 09 June 2024 04:19:31 +0000 (0:00:00.016) 0:04:28.615 *********** TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Sunday 09 June 2024 04:19:31 +0000 (0:00:00.016) 0:04:28.632 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts 
used by role] ********** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Sunday 09 June 2024 04:19:31 +0000 (0:00:00.018) 0:04:28.650 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Sunday 09 June 2024 04:19:31 +0000 (0:00:00.014) 0:04:28.664 *********** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:19:31 +0000 (0:00:00.033) 0:04:28.698 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set flag to indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:19:31 +0000 (0:00:00.015) 0:04:28.714 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:19:31 +0000 (0:00:00.014) 0:04:28.728 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Sunday 09 June 2024 04:19:31 +0000 (0:00:00.012) 0:04:28.740 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:19:31 +0000 (0:00:00.012) 0:04:28.753 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** 
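
The platform/version specific variable loading shown above (RedHat.yml and CentOS.yml skipped, CentOS_9.yml included) follows the usual system-roles pattern of looping over increasingly specific vars files and including only the ones that exist on disk. The sketch below is a hypothetical reconstruction of that pattern, not the role's actual task; the variable name `__storage_vars_file` and the exact loop items are assumptions.

```yaml
# Hypothetical reconstruction of the vars-loading pattern seen in the log.
# __storage_vars_file is an assumed name; the real role keeps its own file
# list under its vars/ directory.
- name: Set platform/version specific variables
  include_vars: "{{ __storage_vars_file }}"
  loop:
    - "{{ ansible_facts['os_family'] }}.yml"        # RedHat.yml (skipped above)
    - "{{ ansible_facts['distribution'] }}.yml"     # CentOS.yml (skipped above)
    - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"
    - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_version'] }}.yml"
  vars:
    __storage_vars_file: "{{ role_path }}/vars/{{ item }}"
  when: __storage_vars_file is file   # include only files that actually exist
```

On this host only the CentOS_9.yml candidates resolve, which is likely why CentOS_9.yml appears twice in the output above (the major-version and full-version file names coincide on CentOS Stream 9) and why it is the item that sets blivet_package_list.
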
task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:19:31 +0000 (0:00:00.026) 0:04:28.780 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:19:32 +0000 (0:00:00.861) 0:04:29.641 *********** ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo", "type": "lvm", "volumes": [ { "encryption": true, "encryption_password": "yabbadabbadoo", "mount_point": "/opt/test1", "name": "test1", "size": "4g" } ] } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:19:32 +0000 (0:00:00.016) 0:04:29.658 *********** ok: [sut] => { "storage_volumes": [] } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:19:32 +0000 (0:00:00.016) 0:04:29.675 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "cryptsetup", "lvm2" ], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:19:33 +0000 (0:00:01.160) 0:04:30.835 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:19:33 +0000 (0:00:00.023) 0:04:30.859 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 04:19:33 +0000 (0:00:00.012) 0:04:30.871 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:19:33 +0000 (0:00:00.013) 0:04:30.884 *********** TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:19:33 +0000 (0:00:00.013) 0:04:30.898 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup kpartx lvm2 TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:19:34 +0000 (0:00:00.880) 0:04:31.779 *********** ok: [sut] => { "ansible_facts": { "services": { 
"NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", 
"source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": 
"kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { 
"name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", 
"source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service": { "name": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "source": "systemd", "state": "stopped", "status": "generated" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": 
"inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, 
"systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": 
"systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:19:35 +0000 (0:00:01.476) 0:04:33.256 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [ "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service" ] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:19:35 +0000 (0:00:00.020) 0:04:33.277 *********** changed: [sut] => (item=systemd-cryptsetup@luks\x2d25f92b54\x2da245\x2d4387\x2d8b24\x2dafa23facba1b.service) => { "ansible_loop_var": "item", "changed": true, "item": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "name": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "cryptsetup-pre.target \"dev-mapper-foo\\\\x2dtest1.device\" systemd-journald.socket systemd-udevd-kernel.socket \"system-systemd\\\\x2dcryptsetup.slice\"", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "umount.target cryptsetup.target \"blockdev@dev-mapper-luks\\\\x2d25f92b54\\\\x2da245\\\\x2d4387\\\\x2d8b24\\\\x2dafa23facba1b.target\"", "BindsTo": "\"dev-mapper-foo\\\\x2dtest1.device\"", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "umount.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "no", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Cryptography Setup for luks-25f92b54-a245-4387-8b24-afa23facba1b", "DevicePolicy": "auto", "Documentation": 
"\"man:crypttab(5)\" \"man:systemd-cryptsetup-generator(8)\" \"man:systemd-cryptsetup@.service(8)\"", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup attach luks-25f92b54-a245-4387-8b24-afa23facba1b /dev/mapper/foo-test1 - ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup attach luks-25f92b54-a245-4387-8b24-afa23facba1b /dev/mapper/foo-test1 - ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup detach luks-25f92b54-a245-4387-8b24-afa23facba1b ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/lib/systemd/systemd-cryptsetup ; argv[]=/usr/lib/systemd/systemd-cryptsetup detach luks-25f92b54-a245-4387-8b24-afa23facba1b ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "IgnoreOnIsolate": "yes", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "shared", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13972", "LimitNPROCSoft": "13972", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13972", "LimitSIGPENDINGSoft": "13972", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", 
"MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"systemd-cryptsetup@luks\\\\x2d25f92b54\\\\x2da245\\\\x2d4387\\\\x2d8b24\\\\x2dafa23facba1b.service\"", "NeedDaemonReload": "yes", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "500", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "RequiredBy": "cryptsetup.target", "Requires": "\"system-systemd\\\\x2dcryptsetup.slice\"", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system-systemd\\x2dcryptsetup.slice", "SourcePath": "/etc/crypttab", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sun 2024-06-09 04:19:00 UTC", "StateChangeTimestampMonotonic": "681384032", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22356", "TimeoutAbortUSec": "infinity", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "infinity", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "\"blockdev@dev-mapper-luks\\\\x2d25f92b54\\\\x2da245\\\\x2d4387\\\\x2d8b24\\\\x2dafa23facba1b.target\"", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:19:36 +0000 (0:00:00.512) 0:04:33.790 *********** fatal: [sut]: FAILED! 
=> { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } MSG: cannot remove existing formatting on device 'test1' in safe mode due to adding encryption TASK [linux-system-roles.storage : Failed message] ***************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:106 Sunday 09 June 2024 04:19:37 +0000 (0:00:01.177) 0:04:34.967 *********** fatal: [sut]: FAILED! => { "changed": false } MSG: {'msg': "cannot remove existing formatting on device 'test1' in safe mode due to adding encryption", 'changed': False, 'actions': [], 'leaves': [], 'mounts': [], 'crypts': [], 'pools': [], 'volumes': [], 'packages': [], 'failed': True, 'invocation': {'module_args': {'pools': [{'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None}]}], 'volumes': [], 'use_partitions': None, 'disklabel_type': None, 'pool_defaults': {'state': 'present', 'type': 'lvm', 'disks': [], 'volumes': [], 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'shared': False}, 'volume_defaults': {'state': 'present', 'type': 'lvm', 'size': 0, 'disks': [], 'fs_type': 'xfs', 'fs_label': '', 'fs_create_options': '', 'fs_overwrite_existing': True, 'mount_point': '', 'mount_options': 'defaults', 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_stripe_size': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': []}, 'safe_mode': True, 'packages_only': False, 'diskvolume_mkfs_option_map': {}}}, '_ansible_no_log': False} TASK [linux-system-roles.storage : Unmask 
the systemd cryptsetup services] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:19:37 +0000 (0:00:00.017) 0:04:34.984 *********** changed: [sut] => (item=systemd-cryptsetup@luks\x2d25f92b54\x2da245\x2d4387\x2d8b24\x2dafa23facba1b.service) => { "ansible_loop_var": "item", "changed": true, "item": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "name": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "no", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "DevicePolicy": "auto", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/etc/systemd/system/systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": 
"control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "463069184", "LimitMEMLOCKSoft": "463069184", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "1073741816", "LimitNOFILESoft": "1073741816", "LimitNPROC": "13972", "LimitNPROCSoft": "13972", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13972", "LimitSIGPENDINGSoft": "13972", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadError": "org.freedesktop.systemd1.UnitMasked \"Unit systemd-cryptsetup@luks\\x2d25f92b54\\x2da245\\x2d4387\\x2d8b24\\x2dafa23facba1b.service is masked.\"", "LoadState": "masked", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "\"systemd-cryptsetup@luks\\\\x2d25f92b54\\\\x2da245\\\\x2d4387\\\\x2d8b24\\\\x2dafa23facba1b.service\"", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "inherit", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", 
"SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22356", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "masked", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [Check that we failed in the role] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:29 Sunday 09 June 2024 04:19:38 +0000 (0:00:00.510) 0:04:35.495 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the blivet output and error message are correct] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:34 Sunday 09 June 2024 04:19:38 +0000 (0:00:00.015) 0:04:35.510 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify correct exception or error message] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-failed.yml:45 Sunday 09 June 2024 04:19:38 +0000 (0:00:00.018) 0:04:35.528 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Stat the file] *********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-data-preservation.yml:11 Sunday 09 June 2024 04:19:38 +0000 (0:00:00.012) 0:04:35.541 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906771.2008, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1717906771.2008, "dev": 64768, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0644", "mtime": 1717906771.2008, "nlink": 1, "path": "/opt/test1/quux", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 0, "uid": 0, "version": "3633539013", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Assert file presence] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-data-preservation.yml:16 Sunday 09 June 2024 04:19:38 +0000 (0:00:00.183) 0:04:35.725 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Add encryption to the volume] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:474 Sunday 09 June 2024 04:19:38 +0000 (0:00:00.016) 0:04:35.741 *********** TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Sunday 09 June 2024 04:19:38 +0000 (0:00:00.038) 0:04:35.779 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] 
********** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Sunday 09 June 2024 04:19:38 +0000 (0:00:00.018) 0:04:35.797 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Sunday 09 June 2024 04:19:38 +0000 (0:00:00.015) 0:04:35.812 *********** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:19:38 +0000 (0:00:00.035) 0:04:35.847 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set flag to indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:19:38 +0000 (0:00:00.013) 0:04:35.861 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:19:38 +0000 (0:00:00.012) 0:04:35.873 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Sunday 09 June 2024 04:19:38 +0000 (0:00:00.011) 0:04:35.885 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:19:38 +0000 (0:00:00.011) 0:04:35.897 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:19:38 +0000 (0:00:00.026) 0:04:35.923 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:19:39 +0000 (0:00:00.856) 0:04:36.780 *********** ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo", "type": "lvm", "volumes": [ { "encryption": true, "encryption_password": "yabbadabbadoo", "mount_point": "/opt/test1", "name": "test1", "size": "4g" } ] } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:19:39 +0000 (0:00:00.016) 0:04:36.796 *********** ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:19:39 +0000 (0:00:00.014) 0:04:36.810 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "cryptsetup", "lvm2" ], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:19:40 +0000 (0:00:01.129) 0:04:37.940 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:19:40 +0000 (0:00:00.025) 0:04:37.965 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 04:19:40 +0000 (0:00:00.012) 0:04:37.977 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:19:40 +0000 (0:00:00.015) 0:04:37.993 *********** TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:19:40 +0000 (0:00:00.013) 0:04:38.006 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup kpartx lvm2 TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:19:41 +0000 (0:00:00.869) 0:04:38.875 *********** ok: [sut] => { 
"ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": 
{ "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": 
{ "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { 
"name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": 
"systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": 
"systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:19:42 +0000 (0:00:01.468) 0:04:40.343 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:19:43 +0000 (0:00:00.020) 0:04:40.364 *********** TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:19:43 +0000 (0:00:00.011) 0:04:40.376 *********** changed: [sut] => { "actions": [ { "action": "destroy format", "device": "/dev/mapper/foo-test1", "fs_type": "xfs" }, { "action": "create format", "device": "/dev/mapper/foo-test1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "fs_type": "xfs" } ], "changed": true, "crypts": [ { "backing_device": "/dev/mapper/foo-test1", "name": "luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "password": "-", "state": "present" } ], "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "absent" }, { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "state": "mounted" } ], "packages": [ "cryptsetup", "xfsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, 
"raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:98 Sunday 09 June 2024 04:19:53 +0000 (0:00:10.470) 0:04:50.846 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:19:53 +0000 (0:00:00.014) 0:04:50.860 *********** TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:116 Sunday 09 June 2024 04:19:53 +0000 (0:00:00.011) 0:04:50.872 *********** ok: [sut] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/foo-test1", "fs_type": "xfs" }, { "action": "create format", "device": "/dev/mapper/foo-test1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "fs_type": "xfs" } ], "changed": true, "crypts": [ { "backing_device": "/dev/mapper/foo-test1", "name": "luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "password": "-", "state": "present" } ], "failed": false, "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "absent" }, { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "state": "mounted" } ], "packages": [ "cryptsetup", "xfsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, 
"raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:125 Sunday 09 June 2024 04:19:53 +0000 (0:00:00.018) 0:04:50.891 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:129 Sunday 09 June 2024 04:19:53 +0000 (0:00:00.015) 0:04:50.907 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:145 Sunday 09 June 2024 04:19:53 +0000 (0:00:00.014) 0:04:50.921 *********** changed: [sut] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'xfs'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "absent" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:157 Sunday 09 June 2024 04:19:53 +0000 (0:00:00.180) 0:04:51.102 *********** ok: [sut] => 
{ "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:162 Sunday 09 June 2024 04:19:54 +0000 (0:00:00.489) 0:04:51.591 *********** changed: [sut] => (item={'src': '/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03" } TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Sunday 09 June 2024 04:19:54 +0000 (0:00:00.194) 0:04:51.786 *********** skipping: [sut] => (item={'src': '/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:189 Sunday 09 June 2024 04:19:54 +0000 (0:00:00.017) 0:04:51.804 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:197 Sunday 09 June 2024 04:19:54 +0000 (0:00:00.481) 0:04:52.285 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906766.3747818, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1717906765.1947775, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 367001803, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1717906765.1947775, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "784245288", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:202 Sunday 09 June 2024 04:19:55 +0000 (0:00:00.179) 0:04:52.464 
*********** changed: [sut] => (item={'backing_device': '/dev/mapper/foo-test1', 'name': 'luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/mapper/foo-test1", "name": "luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "password": "-", "state": "present" } } MSG: line added TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:224 Sunday 09 June 2024 04:19:55 +0000 (0:00:00.181) 0:04:52.646 *********** ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:490 Sunday 09 June 2024 04:19:55 +0000 (0:00:00.606) 0:04:53.253 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:2 Sunday 09 June 2024 04:19:55 +0000 (0:00:00.027) 0:04:53.280 *********** ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:7 Sunday 09 June 2024 04:19:55 +0000 (0:00:00.018) 0:04:53.298 *********** skipping: [sut] => {} TASK [Collect info about the volumes.] 
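For orientation, the device stack described by the pool output above and the volume info collected below is: /dev/sda (LVM2_member) -> VG foo -> LV test1 at /dev/mapper/foo-test1 (now formatted as crypto_LUKS) -> dm-crypt mapping /dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03 carrying the xfs filesystem mounted at /opt/test1.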
***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:15 Sunday 09 June 2024 04:19:55 +0000 (0:00:00.013) 0:04:53.312 *********** ok: [sut] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "crypto_LUKS", "label": "", "name": "/dev/mapper/foo-test1", "size": "4G", "type": "lvm", "uuid": "d169c40b-28b8-4e2b-9050-a6ebfe06bb03" }, "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03": { "fstype": "xfs", "label": "", "name": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "size": "4G", "type": "crypt", "uuid": "3a1729fe-0f76-4178-919c-f69ca6b386cc" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "1agGsq-N3WZ-Sn5I-mLjB-j4Aw-8XPP-3ZXjSl" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "xfs", "label": "", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "82a26900-2b36-49b9-bc49-f4d2f0bb14b8" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:20 Sunday 09 June 2024 04:19:56 +0000 (0:00:00.176) 0:04:53.488 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003094", "end": "2024-06-09 04:19:56.297398", "rc": 0, "start": "2024-06-09 04:19:56.294304" } STDOUT: # # /etc/fstab # Created by anaconda on Tue May 28 13:15:44 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=82a26900-2b36-49b9-bc49-f4d2f0bb14b8 / xfs defaults 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03 /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:25 Sunday 09 June 2024 04:19:56 +0000 (0:00:00.181) 0:04:53.669 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002973", "end": "2024-06-09 04:19:56.478978", "failed_when_result": false, "rc": 0, "start": "2024-06-09 04:19:56.476005" } STDOUT: luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03 /dev/mapper/foo-test1 - TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:34 Sunday 09 June 2024 04:19:56 +0000 (0:00:00.181) 0:04:53.851 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml for sut TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:5 Sunday 09 June 2024 04:19:56 +0000 (0:00:00.027) 0:04:53.878 *********** ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:18 Sunday 09 June 2024 04:19:56 +0000 (0:00:00.011) 0:04:53.890 *********** ok: [sut] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.031346", "end": "2024-06-09 04:19:56.726510", "rc": 0, "start": "2024-06-09 04:19:56.695164" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:24 Sunday 09 June 2024 04:19:56 +0000 (0:00:00.209) 0:04:54.100 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool.yml:34 Sunday 09 June 2024 04:19:56 +0000 (0:00:00.018) 0:04:54.119 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-volumes.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:2 Sunday 09 June 2024 04:19:56 +0000 (0:00:00.026) 0:04:54.146 *********** ok: 
[sut] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:13 Sunday 09 June 2024 04:19:56 +0000 (0:00:00.018) 0:04:54.164 *********** ok: [sut] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:22 Sunday 09 June 2024 04:19:56 +0000 (0:00:00.171) 0:04:54.335 *********** ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:27 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.015) 0:04:54.350 *********** ok: [sut] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:33 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.016) 0:04:54.367 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:42 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.016) 0:04:54.384 *********** ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:48 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.014) 0:04:54.398 *********** ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:54 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.014) 0:04:54.413 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:59 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.012) 0:04:54.426 *********** ok: [sut] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:73 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.019) 0:04:54.446 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:8 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.057) 0:04:54.503 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:14 Sunday 09 June 
2024 04:19:57 +0000 (0:00:00.013) 0:04:54.516 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:21 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.012) 0:04:54.529 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:28 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.013) 0:04:54.542 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:35 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.014) 0:04:54.556 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:45 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.013) 0:04:54.570 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:54 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.013) 0:04:54.583 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:64 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.012) 0:04:54.595 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:74 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.012) 0:04:54.608 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:85 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.012) 0:04:54.621 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-md.yml:95 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.014) 0:04:54.635 *********** ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:76 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.011) 0:04:54.647 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-lvmraid.yml:2 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.023) 0:04:54.671 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml for sut TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:8 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.025) 0:04:54.696 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:16 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.013) 0:04:54.710 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:21 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.013) 0:04:54.723 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:29 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.012) 0:04:54.736 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:34 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.012) 0:04:54.748 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:40 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.012) 0:04:54.761 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-lvmraid.yml:46 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.014) 0:04:54.776 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:79 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.012) 0:04:54.789 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-thin.yml:2 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.024) 0:04:54.813 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-thin.yml for sut TASK [Get information about thinpool] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-thin.yml:8 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.027) 0:04:54.840 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-thin.yml:16 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.013) 0:04:54.853 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-thin.yml:23 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.013) 0:04:54.866 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-thin.yml:27 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.013) 0:04:54.880 *********** ok: [sut] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:82 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.011) 0:04:54.892 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:5 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.028) 0:04:54.920 *********** ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:13 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.015) 0:04:54.935 *********** skipping: [sut] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:20 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.015) 0:04:54.951 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml for sut TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:2 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.023) 0:04:54.974 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:9 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.016) 0:04:54.991 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:18 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.016) 0:04:55.007 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:27 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.013) 0:04:55.020 *********** skipping: 
[sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:37 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.013) 0:04:55.034 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-crypttab.yml:47 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.012) 0:04:55.046 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-encryption.yml:27 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.011) 0:04:55.058 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:85 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.013) 0:04:55.072 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-vdo.yml for sut TASK [Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-members-vdo.yml:2 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.028) 0:04:55.100 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml for sut TASK [Get information about VDO deduplication] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:9 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.029) 0:04:55.129 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:16 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.015) 0:04:55.144 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:22 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.015) 0:04:55.160 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:28 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.014) 0:04:55.175 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:35 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.015) 0:04:55.190 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:41 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.014) 0:04:55.204 
*********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-pool-member-vdo.yml:47 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.014) 0:04:55.219 *********** ok: [sut] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-members.yml:88 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.012) 0:04:55.232 *********** ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-pool-volumes.yml:3 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.013) 0:04:55.245 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml for sut TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:2 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.023) 0:04:55.269 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:21 Sunday 09 June 2024 04:19:57 +0000 (0:00:00.016) 0:04:55.285 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml for sut TASK [Get expected mount device based on device type] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:7 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.133) 0:04:55.419 *********** ok: [sut] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:16 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.015) 0:04:55.435 *********** ok: [sut] => { "ansible_facts": { "storage_test_mount_device_matches": [ { "block_available": 1012662, "block_size": 4096, "block_total": 1028096, "block_used": 15434, "device": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "fstype": "xfs", "inode_available": 2088957, "inode_total": 2088960, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 
4147863552, "size_total": 4211081216, "uuid": "3a1729fe-0f76-4178-919c-f69ca6b386cc" } ], "storage_test_mount_expected_match_count": "1", "storage_test_mount_point_matches": [ { "block_available": 1012662, "block_size": 4096, "block_total": 1028096, "block_used": 15434, "device": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "fstype": "xfs", "inode_available": 2088957, "inode_total": 2088960, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", "size_available": 4147863552, "size_total": 4211081216, "uuid": "3a1729fe-0f76-4178-919c-f69ca6b386cc" } ], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:38 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.020) 0:04:55.455 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:51 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.013) 0:04:55.468 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the current mount state by mount point] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:63 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.017) 0:04:55.486 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:71 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.014) 0:04:55.501 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:83 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.012) 0:04:55.513 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:95 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.012) 0:04:55.526 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the mount fs type] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:110 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.012) 0:04:55.539 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:122 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.017) 0:04:55.556 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:128 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.014) 0:04:55.571 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] 
****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:134 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.012) 0:04:55.584 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:146 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.012) 0:04:55.597 *********** ok: [sut] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:2 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.011) 0:04:55.609 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:40 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.026) 0:04:55.635 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:48 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.016) 0:04:55.651 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:58 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.016) 0:04:55.668 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:71 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.013) 0:04:55.681 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:3 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.011) 0:04:55.693 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:12 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.017) 
0:04:55.710 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:3 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.018) 0:04:55.729 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906793.0708811, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1717906793.0708811, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 1667, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1717906793.0708811, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:9 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.177) 0:04:55.907 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:16 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.017) 0:04:55.925 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:24 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.012) 0:04:55.938 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:30 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.014) 0:04:55.953 *********** ok: [sut] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:34 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.015) 0:04:55.968 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:39 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.013) 0:04:55.981 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:3 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.015) 0:04:55.996 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906793.4088824, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1717906793.4088824, "dev": 5, "device_type": 64769, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 1735, "isblk": true, "ischr": false, "isdir": false, 
"isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1717906793.4088824, "nlink": 1, "path": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:10 Sunday 09 June 2024 04:19:58 +0000 (0:00:00.177) 0:04:56.173 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:17 Sunday 09 June 2024 04:19:59 +0000 (0:00:00.859) 0:04:57.033 *********** ok: [sut] => { "changed": false, "cmd": [ "cryptsetup", "luksDump", "/dev/mapper/foo-test1" ], "delta": "0:00:00.007215", "end": "2024-06-09 04:19:59.844979", "rc": 0, "start": "2024-06-09 04:19:59.837764" } STDOUT: LUKS header information Version: 2 Epoch: 3 Metadata area: 16384 [bytes] Keyslots area: 16744448 [bytes] UUID: d169c40b-28b8-4e2b-9050-a6ebfe06bb03 Label: (no label) Subsystem: (no subsystem) Flags: (no flags) Data segments: 0: crypt offset: 16777216 [bytes] length: (whole device) cipher: aes-xts-plain64 sector: 512 [bytes] Keyslots: 0: luks2 Key: 512 bits Priority: normal Cipher: aes-xts-plain64 Cipher key: 512 bits PBKDF: argon2id Time cost: 4 Memory: 670696 Threads: 2 Salt: 9e db 64 53 c6 ff 0a c3 79 77 49 a2 83 f0 ae 38 ac e8 44 60 cc 10 bd f9 0a 3b c4 b4 e0 2b 99 a6 AF stripes: 4000 AF hash: sha256 Area offset:32768 [bytes] Area length:258048 [bytes] Digest ID: 0 Tokens: Digests: 0: pbkdf2 Hash: sha256 Iterations: 103369 Salt: 88 78 47 d9 d9 cc 6e 43 68 28 da 10 bd cb 44 7b 1e 51 89 bb 06 9c db 49 4c 9d 27 aa 13 c3 1a 61 Digest: b1 54 7a 5f 19 52 22 79 86 c6 d6 7b c0 9c d3 f0 d4 74 6e b3 6c de 31 51 34 47 c8 2a 67 1c 13 c2 TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:23 Sunday 09 June 2024 04:19:59 +0000 (0:00:00.185) 0:04:57.218 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:32 Sunday 09 June 2024 04:19:59 +0000 (0:00:00.017) 0:04:57.236 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:45 Sunday 09 June 2024 04:19:59 +0000 (0:00:00.017) 0:04:57.254 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:51 Sunday 09 June 2024 04:19:59 +0000 (0:00:00.016) 0:04:57.270 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check LUKS version] ****************************************************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:56 Sunday 09 June 2024 04:19:59 +0000 (0:00:00.016) 0:04:57.287 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:69 Sunday 09 June 2024 04:19:59 +0000 (0:00:00.014) 0:04:57.301 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:81 Sunday 09 June 2024 04:19:59 +0000 (0:00:00.014) 0:04:57.315 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:94 Sunday 09 June 2024 04:19:59 +0000 (0:00:00.013) 0:04:57.329 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [ "luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03 /dev/mapper/foo-test1 -" ], "_storage_test_expected_crypttab_entries": "1", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:106 Sunday 09 June 2024 04:19:59 +0000 (0:00:00.016) 0:04:57.345 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:114 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.015) 0:04:57.361 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:122 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.017) 0:04:57.379 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:131 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.019) 0:04:57.398 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:140 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.017) 0:04:57.416 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:8 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.012) 0:04:57.428 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:14 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.013) 
0:04:57.441 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:21 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.012) 0:04:57.454 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:28 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.012) 0:04:57.467 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:35 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.014) 0:04:57.482 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:45 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.013) 0:04:57.495 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:54 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.013) 0:04:57.508 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:63 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.013) 0:04:57.522 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:72 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.013) 0:04:57.535 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:81 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.012) 0:04:57.548 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:3 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.014) 0:04:57.562 *********** ok: [sut] => { "bytes": 4294967296, "changed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:11 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.177) 0:04:57.740 *********** ok: [sut] => { "bytes": 4294967296, "changed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } TASK [Establish base value for expected size] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:20 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.175) 0:04:57.915 
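The expected size established by the task whose result follows comes straight from the requested "4g": 4 GiB = 4 * 1024^3 bytes = 4294967296 bytes, which is exactly the value both parse tasks above returned for the actual and the requested size.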
*********** ok: [sut] => { "ansible_facts": { "storage_test_expected_size": "4294967296" }, "changed": false } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:28 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.018) 0:04:57.933 *********** ok: [sut] => { "storage_test_expected_size": "4294967296" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:32 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.014) 0:04:57.948 *********** ok: [sut] => { "bytes": 10726680821, "changed": false, "lvm": "9g", "parted": "9GiB", "size": "9 GiB" } TASK [Show test pool] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:46 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.172) 0:04:58.120 *********** skipping: [sut] => {} TASK [Show test blockinfo] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:50 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.016) 0:04:58.137 *********** skipping: [sut] => {} TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:54 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.015) 0:04:58.153 *********** skipping: [sut] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:58 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.017) 0:04:58.170 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:68 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.016) 0:04:58.186 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:72 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.013) 0:04:58.200 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:77 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.015) 0:04:58.215 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:83 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.013) 0:04:58.228 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:88 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.013) 0:04:58.241 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:96 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.012) 0:04:58.254 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:104 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.013) 0:04:58.267 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:109 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.012) 0:04:58.280 *********** skipping: [sut] => {} TASK [Show volume thin pool size] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:113 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.014) 0:04:58.295 *********** skipping: [sut] => {} TASK [Show test volume size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:117 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.013) 0:04:58.308 *********** skipping: [sut] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:121 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.013) 0:04:58.321 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:129 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.013) 0:04:58.335 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:138 Sunday 09 June 2024 04:20:00 +0000 (0:00:00.013) 0:04:58.348 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:142 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.012) 0:04:58.361 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:150 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.014) 0:04:58.375 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:156 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.012) 0:04:58.388 *********** ok: [sut] => { "storage_test_actual_size": { "bytes": 4294967296, "changed": false, "failed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:160 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.014) 0:04:58.402 *********** ok: [sut] 
=> { "storage_test_expected_size": "4294967296" } TASK [Assert expected size is actual size] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:164 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.014) 0:04:58.416 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:5 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.018) 0:04:58.435 *********** ok: [sut] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.029488", "end": "2024-06-09 04:20:01.267610", "rc": 0, "start": "2024-06-09 04:20:01.238122" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:13 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.203) 0:04:58.639 *********** ok: [sut] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:18 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.017) 0:04:58.657 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:27 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.017) 0:04:58.674 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:35 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.014) 0:04:58.689 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:41 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.015) 0:04:58.704 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:47 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.014) 0:04:58.718 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:27 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.014) 0:04:58.733 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:44 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.013) 0:04:58.747 *********** TASK [Clean up variable namespace] ********************************************* task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:54 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.012) 0:04:58.759 *********** ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Clean up] **************************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:493 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.012) 0:04:58.771 *********** TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.042) 0:04:58.814 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.021) 0:04:58.835 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.016) 0:04:58.852 *********** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [sut] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [linux-system-roles.storage : Check if system is ostree] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:26 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.034) 0:04:58.887 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set flag to indicate system is ostree] ****** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:31 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.014) 0:04:58.901 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Sunday 09 June 2024 04:20:01 +0000 
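
Note: the platform/version lookup above resolves to vars/CentOS_9.yml (listed twice, presumably because two first_found patterns both resolve to the same file name on this CentOS 9 host). Reconstructed from the ansible_included_var_files output, the variable that file supplies is just the blivet package list:

```yaml
# vars/CentOS_9.yml, as echoed in the output above
blivet_package_list:
  - python3-blivet
  - libblockdev-crypto
  - libblockdev-dm
  - libblockdev-lvm
  - libblockdev-mdraid
  - libblockdev-swap
  - vdo
  - kmod-kvdo
  - xfsprogs
```
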
(0:00:00.014) 0:04:58.916 *********** ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.013) 0:04:58.929 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.012) 0:04:58.942 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Sunday 09 June 2024 04:20:01 +0000 (0:00:00.025) 0:04:58.967 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kmod-kvdo libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet vdo xfsprogs TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:11 Sunday 09 June 2024 04:20:02 +0000 (0:00:00.870) 0:04:59.838 *********** ok: [sut] => { "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined" } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:16 Sunday 09 June 2024 04:20:02 +0000 (0:00:00.014) 0:04:59.853 *********** ok: [sut] => { "storage_volumes": [ { "disks": [ "sda" ], "name": "foo", "state": "absent", "type": "disk" } ] } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:21 Sunday 09 June 2024 04:20:02 +0000 (0:00:00.015) 0:04:59.868 *********** ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:34 Sunday 09 June 2024 04:20:03 +0000 (0:00:01.204) 0:05:01.073 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Sunday 09 June 2024 04:20:03 +0000 (0:00:00.024) 0:05:01.098 *********** TASK [linux-system-roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Sunday 09 June 2024 04:20:03 +0000 (0:00:00.016) 0:05:01.115 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable COPRs] ******************************* task path: 
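
Note: this "Clean up" pass is the storage role invoked with one disk volume in state absent (see the Show storage_volumes output above), which is what later drives blivet to unwind the LUKS mapping, the LV, the VG and the PV signature on sda. Roughly how the calling play passes that in; the include_role form is an assumption, the volume definition is taken from the output:

```yaml
- name: Clean up
  include_role:
    name: linux-system-roles.storage
  vars:
    storage_volumes:
      - name: foo
        type: disk
        disks:
          - sda
        state: absent
```
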
/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:20 Sunday 09 June 2024 04:20:03 +0000 (0:00:00.013) 0:05:01.129 *********** TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 Sunday 09 June 2024 04:20:03 +0000 (0:00:00.012) 0:05:01.141 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kpartx TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 Sunday 09 June 2024 04:20:04 +0000 (0:00:00.873) 0:05:02.014 *********** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": 
"systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": 
"disabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": 
"nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": 
"unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", 
"source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" 
}, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:64 Sunday 09 June 2024 04:20:06 +0000 (0:00:01.478) 0:05:03.493 *********** ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:78 Sunday 09 June 2024 04:20:06 +0000 (0:00:00.021) 0:05:03.514 *********** TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 Sunday 09 June 2024 04:20:06 +0000 (0:00:00.012) 0:05:03.527 *********** changed: [sut] => { "actions": [ { "action": "destroy format", "device": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/foo-test1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "destroy device", "device": "/dev/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "lvmpv" } ], "changed": true, "crypts": [ { "backing_device": "/dev/mapper/foo-test1", "name": "luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "password": "-", "state": "absent" } ], "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "state": "absent" } ], "packages": [ "xfsprogs" ], "pools": [], "volumes": [ { "_device": "/dev/sda", "_mount_id": "UUID=1agGsq-N3WZ-Sn5I-mLjB-j4Aw-8XPP-3ZXjSl", "_raw_device": "/dev/sda", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "lvmpv", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "foo", "raid_chunk_size": null, 
"raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 10737418240, "state": "absent", "thin": null, "thin_pool_name": null, "thin_pool_size": null, "type": "disk", "vdo_pool_size": null } ] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:98 Sunday 09 June 2024 04:20:08 +0000 (0:00:01.942) 0:05:05.469 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Sunday 09 June 2024 04:20:08 +0000 (0:00:00.014) 0:05:05.483 *********** TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:116 Sunday 09 June 2024 04:20:08 +0000 (0:00:00.012) 0:05:05.495 *********** ok: [sut] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/foo-test1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "destroy device", "device": "/dev/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "lvmpv" } ], "changed": true, "crypts": [ { "backing_device": "/dev/mapper/foo-test1", "name": "luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "password": "-", "state": "absent" } ], "failed": false, "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "state": "absent" } ], "packages": [ "xfsprogs" ], "pools": [], "volumes": [ { "_device": "/dev/sda", "_mount_id": "UUID=1agGsq-N3WZ-Sn5I-mLjB-j4Aw-8XPP-3ZXjSl", "_raw_device": "/dev/sda", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "lvmpv", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 10737418240, "state": "absent", "thin": null, "thin_pool_name": null, "thin_pool_size": null, "type": "disk", "vdo_pool_size": null } ] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:125 Sunday 09 June 2024 04:20:08 +0000 (0:00:00.015) 0:05:05.511 *********** ok: [sut] => { 
"ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:129 Sunday 09 June 2024 04:20:08 +0000 (0:00:00.015) 0:05:05.527 *********** ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [ { "_device": "/dev/sda", "_mount_id": "UUID=1agGsq-N3WZ-Sn5I-mLjB-j4Aw-8XPP-3ZXjSl", "_raw_device": "/dev/sda", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "lvmpv", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 10737418240, "state": "absent", "thin": null, "thin_pool_name": null, "thin_pool_size": null, "type": "disk", "vdo_pool_size": null } ] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:145 Sunday 09 June 2024 04:20:08 +0000 (0:00:00.015) 0:05:05.542 *********** changed: [sut] => (item={'src': '/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'xfs'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "state": "absent" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03" } TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:157 Sunday 09 June 2024 04:20:08 +0000 (0:00:00.189) 0:05:05.732 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:162 Sunday 09 June 2024 04:20:08 +0000 (0:00:00.481) 0:05:06.213 *********** TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Sunday 09 June 2024 04:20:08 +0000 (0:00:00.028) 0:05:06.241 *********** TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:189 Sunday 09 June 2024 04:20:08 +0000 (0:00:00.020) 0:05:06.262 *********** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:197 Sunday 09 June 2024 04:20:09 +0000 (0:00:00.488) 0:05:06.750 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906796.4778938, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "5764f5d959d0fe246a6e9c20d966e01f940efad9", "ctime": 1717906795.2698894, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 438304969, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0600", "mtime": 1717906795.2698894, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 66, "uid": 0, "version": "2975691497", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:202 Sunday 09 June 2024 04:20:09 +0000 (0:00:00.197) 0:05:06.948 *********** changed: [sut] => (item={'backing_device': '/dev/mapper/foo-test1', 'name': 'luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/mapper/foo-test1", "name": "luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:224 Sunday 09 June 2024 04:20:09 +0000 (0:00:00.195) 0:05:07.143 *********** ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/tests_luks.yml:503 Sunday 09 June 2024 04:20:10 +0000 (0:00:00.594) 0:05:07.737 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:2 Sunday 09 June 2024 04:20:10 +0000 (0:00:00.027) 0:05:07.765 *********** skipping: [sut] => {} TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:7 Sunday 09 June 2024 04:20:10 +0000 (0:00:00.013) 0:05:07.778 *********** ok: [sut] => { "_storage_volumes_list": [ { "_device": "/dev/sda", "_mount_id": "UUID=1agGsq-N3WZ-Sn5I-mLjB-j4Aw-8XPP-3ZXjSl", "_raw_device": "/dev/sda", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "lvmpv", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": 
null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 10737418240, "state": "absent", "thin": null, "thin_pool_name": null, "thin_pool_size": null, "type": "disk", "vdo_pool_size": null } ] } TASK [Collect info about the volumes.] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:15 Sunday 09 June 2024 04:20:10 +0000 (0:00:00.016) 0:05:07.794 *********** ok: [sut] => { "changed": false, "info": { "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "xfs", "label": "", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "82a26900-2b36-49b9-bc49-f4d2f0bb14b8" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:20 Sunday 09 June 2024 04:20:10 +0000 (0:00:00.193) 0:05:07.988 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002591", "end": "2024-06-09 04:20:10.787435", "rc": 0, "start": "2024-06-09 04:20:10.784844" } STDOUT: # # /etc/fstab # Created by anaconda on Tue May 28 13:15:44 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=82a26900-2b36-49b9-bc49-f4d2f0bb14b8 / xfs defaults 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:25 Sunday 09 June 2024 04:20:10 +0000 (0:00:00.172) 0:05:08.160 *********** ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002447", "end": "2024-06-09 04:20:10.958292", "failed_when_result": false, "rc": 0, "start": "2024-06-09 04:20:10.955845" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:34 Sunday 09 June 2024 04:20:10 +0000 (0:00:00.170) 0:05:08.330 *********** TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:44 Sunday 09 June 2024 04:20:10 +0000 (0:00:00.011) 0:05:08.342 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml for sut TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:2 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.023) 0:05:08.365 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": false, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:21 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.016) 0:05:08.382 *********** included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml for sut included: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml for sut TASK [Get expected mount device based on device type] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:7 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.052) 0:05:08.434 *********** ok: [sut] => { "ansible_facts": { "storage_test_device_path": 
"/dev/sda" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:16 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.015) 0:05:08.450 *********** ok: [sut] => { "ansible_facts": { "storage_test_mount_device_matches": [], "storage_test_mount_expected_match_count": "0", "storage_test_mount_point_matches": [], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:38 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.018) 0:05:08.468 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:51 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.012) 0:05:08.481 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by mount point] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:63 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.013) 0:05:08.494 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:71 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.014) 0:05:08.509 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:83 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.012) 0:05:08.522 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:95 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.012) 0:05:08.535 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the mount fs type] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:110 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.012) 0:05:08.547 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:122 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.012) 0:05:08.560 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:128 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.013) 0:05:08.573 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:134 Sunday 09 June 2024 04:20:11 
+0000 (0:00:00.012) 0:05:08.586 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-mount.yml:146 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.012) 0:05:08.599 *********** ok: [sut] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:2 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.013) 0:05:08.612 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "0", "storage_test_fstab_expected_mount_options_matches": "0", "storage_test_fstab_expected_mount_point_matches": "0", "storage_test_fstab_id_matches": [], "storage_test_fstab_mount_options_matches": [], "storage_test_fstab_mount_point_matches": [] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:40 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.027) 0:05:08.640 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:48 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.014) 0:05:08.655 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:58 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.018) 0:05:08.673 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fstab.yml:71 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.013) 0:05:08.686 *********** ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:3 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.013) 0:05:08.699 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fs label] ********************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-fs.yml:12 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.013) 0:05:08.713 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [See whether the device node is present] ********************************** task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:3 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.013) 0:05:08.726 *********** ok: [sut] => { "changed": false, "stat": { "atime": 1717906807.995941, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1717906807.995941, "dev": 5, "device_type": 2048, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 446, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/blockdevice", "mode": "0660", "mtime": 1717906807.995941, "nlink": 1, "path": "/dev/sda", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:9 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.182) 0:05:08.908 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:16 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.017) 0:05:08.926 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:24 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.012) 0:05:08.938 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:30 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.010) 0:05:08.949 *********** ok: [sut] => { "ansible_facts": { "st_volume_type": "disk" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:34 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.013) 0:05:08.963 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-device.yml:39 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.013) 0:05:08.976 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:3 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.010) 0:05:08.987 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:10 Sunday 09 June 2024 04:20:11 +0000 (0:00:00.012) 0:05:08.999 *********** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] 
*************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:17 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.863) 0:05:09.862 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:23 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.022) 0:05:09.885 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:32 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.013) 0:05:09.898 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:45 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.010) 0:05:09.909 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:51 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.012) 0:05:09.921 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:56 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.012) 0:05:09.934 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:69 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.009) 0:05:09.943 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:81 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.011) 0:05:09.955 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:94 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.009) 0:05:09.964 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:106 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.017) 0:05:09.981 *********** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:114 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.017) 0:05:09.999 *********** skipping: [sut] 
=> { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:122 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.014) 0:05:10.013 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:131 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.015) 0:05:10.029 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-encryption.yml:140 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.015) 0:05:10.044 *********** ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:8 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.056) 0:05:10.101 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:14 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.014) 0:05:10.115 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:21 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.035) 0:05:10.150 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:28 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.015) 0:05:10.166 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:35 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.014) 0:05:10.181 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:45 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.013) 0:05:10.194 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:54 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.015) 0:05:10.209 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:63 Sunday 09 
June 2024 04:20:12 +0000 (0:00:00.015) 0:05:10.224 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:72 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.013) 0:05:10.238 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-md.yml:81 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.013) 0:05:10.251 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:3 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.012) 0:05:10.264 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested size of the volume] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:11 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.012) 0:05:10.276 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:20 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.014) 0:05:10.291 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:28 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.012) 0:05:10.304 *********** ok: [sut] => { "storage_test_expected_size": "4294967296" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:32 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.016) 0:05:10.320 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:46 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.013) 0:05:10.333 *********** skipping: [sut] => {} TASK [Show test blockinfo] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:50 Sunday 09 June 2024 04:20:12 +0000 (0:00:00.012) 0:05:10.346 *********** skipping: [sut] => {} TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:54 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.012) 0:05:10.359 *********** skipping: [sut] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:58 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.013) 0:05:10.373 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: 
/WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:68 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.013) 0:05:10.386 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:72 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.012) 0:05:10.399 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:77 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.012) 0:05:10.412 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:83 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.013) 0:05:10.425 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:88 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.012) 0:05:10.438 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:96 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.014) 0:05:10.452 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:104 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.012) 0:05:10.465 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:109 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.013) 0:05:10.478 *********** skipping: [sut] => {} TASK [Show volume thin pool size] ********************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:113 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.013) 0:05:10.492 *********** skipping: [sut] => {} TASK [Show test volume size] *************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:117 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.014) 0:05:10.506 *********** skipping: [sut] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:121 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.012) 0:05:10.519 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:129 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.014) 0:05:10.534 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was 
False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:138 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.013) 0:05:10.547 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:142 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.013) 0:05:10.560 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:150 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.013) 0:05:10.574 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:156 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.012) 0:05:10.587 *********** ok: [sut] => { "storage_test_actual_size": { "changed": false, "skip_reason": "Conditional result was False", "skipped": true } } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:160 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.013) 0:05:10.601 *********** ok: [sut] => { "storage_test_expected_size": "4294967296" } TASK [Assert expected size is actual size] ************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-size.yml:164 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.015) 0:05:10.616 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:5 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.012) 0:05:10.629 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:13 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.013) 0:05:10.642 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:18 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.012) 0:05:10.655 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:27 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.012) 0:05:10.668 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:35 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.012) 0:05:10.680 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" 
} TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:41 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.014) 0:05:10.695 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume-cache.yml:47 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.013) 0:05:10.708 *********** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /WORKDIR/git-weekly-cizhupc3wu/tests/test-verify-volume.yml:27 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.013) 0:05:10.722 *********** ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-weekly-cizhupc3wu/tests/verify-role-results.yml:54 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.011) 0:05:10.734 *********** ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } META: ran handlers META: ran handlers PLAY RECAP ********************************************************************* sut : ok=1191 changed=62 unreachable=0 failed=9 skipped=1021 rescued=9 ignored=0 Sunday 09 June 2024 04:20:13 +0000 (0:00:00.007) 0:05:10.741 *********** =============================================================================== linux-system-roles.storage : Make sure blivet is available ------------- 47.67s /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 linux-system-roles.storage : Manage the pools and volumes to match the specified state -- 11.22s /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 linux-system-roles.storage : Manage the pools and volumes to match the specified state -- 10.60s /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 linux-system-roles.storage : Manage the pools and volumes to match the specified state -- 10.59s /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 linux-system-roles.storage : Manage the pools and volumes to match the specified state -- 10.47s /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 linux-system-roles.storage : Manage the pools and volumes to match the specified state --- 9.86s /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 linux-system-roles.storage : Manage the pools and volumes to match the specified state --- 8.03s /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 linux-system-roles.storage : Manage the pools and volumes to match the specified state --- 1.97s /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 linux-system-roles.storage : Manage the pools and volumes to match the specified state --- 1.94s /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 linux-system-roles.storage : Manage the pools and volumes to match the specified state --- 1.69s 
/WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:84 linux-system-roles.storage : Make sure required packages are installed --- 1.68s /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:41 linux-system-roles.storage : Get service facts -------------------------- 1.55s /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 linux-system-roles.storage : Get service facts -------------------------- 1.51s /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 linux-system-roles.storage : Get service facts -------------------------- 1.49s /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 linux-system-roles.storage : Get service facts -------------------------- 1.49s /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 linux-system-roles.storage : Get service facts -------------------------- 1.48s /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 linux-system-roles.storage : Get service facts -------------------------- 1.48s /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 linux-system-roles.storage : Get service facts -------------------------- 1.48s /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 linux-system-roles.storage : Get service facts -------------------------- 1.48s /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 linux-system-roles.storage : Get service facts -------------------------- 1.48s /WORKDIR/git-weekly-cizhupc3wu/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:57 ---^---^---^---^---^--- # STDERR: ---v---v---v---v---v--- [DEPRECATION WARNING]: Distribution centos 9 on host sut should use /usr/libexec/platform-python, but is using /usr/bin/python for backward compatibility with prior Ansible releases. A future Ansible release will default to using the discovered platform python for this host. See https://docs .ansible.com/ansible/2.9/reference_appendices/interpreter_discovery.html for more information. This feature will be removed in version 2.12. Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg. ---^---^---^---^---^---
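
Editor's note: for readers who want to reproduce the post-removal checks recorded above outside the test suite, below is a minimal, hypothetical sketch using only core Ansible modules (lineinfile, command, stat, assert). It is not the role's actual implementation; the storage role drives crypttab maintenance and verification through its own task files and helpers. The play header, the test_device and stale_luks_name variables, and the regular expression are illustrative assumptions taken from the device name and LUKS UUID that appear in the log.

---
# Hypothetical standalone reproduction of the verification steps logged above.
- hosts: sut
  become: true
  vars:
    # Values lifted from the log; adjust for the device under test.
    test_device: /dev/sda
    stale_luks_name: luks-d169c40b-28b8-4e2b-9050-a6ebfe06bb03
  tasks:
    - name: Remove any stale crypttab entry left over from the removed LUKS volume
      lineinfile:
        path: /etc/crypttab
        regexp: '^{{ stale_luks_name }}\s'
        state: absent

    - name: Read the /etc/crypttab file
      command: cat /etc/crypttab
      register: storage_test_crypttab
      changed_when: false
      failed_when: false

    - name: Check for /etc/crypttab entry
      assert:
        that:
          - storage_test_crypttab.stdout_lines | select('search', stale_luks_name) | list | length == 0
        msg: "crypttab still references the removed LUKS volume"

    - name: See whether the device node is present
      stat:
        path: "{{ test_device }}"
      register: storage_test_dev

    - name: Verify the presence/absence of the device node
      assert:
        that:
          - storage_test_dev.stat.exists
          - storage_test_dev.stat.isblk
        msg: "expected {{ test_device }} to exist as a block device"

Running a play like this against the same host should yield results comparable to the "Manage /etc/crypttab to account for changes we just made", "Check for /etc/crypttab entry", and "See whether the device node is present" tasks in the log; lineinfile here is only a stand-in for whatever mechanism the role actually uses to edit /etc/crypttab.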