ansible-playbook [core 2.17.12]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-qED
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.10 (main, May 9 2025, 00:00:00) [GCC 11.5.0 20240719 (Red Hat 11.5.0-5)] (/usr/bin/python3.12)
  jinja version = 3.1.6
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
statically imported: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/get_services_state.yml
statically imported: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'jsonl', as we already have a stdout callback.
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_verify_bpftrace.yml ********************************************
2 plays in /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_bpftrace.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_bpftrace.yml:5
Monday 23 June 2025 03:01:44 -0400 (0:00:00.019) 0:00:00.019 ***********
ok: [managed-node3] => {
    "ansible_facts": {
        "pcptest_pw": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n65343431623161346664373330646165636437656265656632613961363839303132393064663934\n3137396633373562393466633037356533326566343338350a386238333034336162333932313162\n62643937336534356131376134303463306466316433366636643562633637376336653034646334\n3063663466333735390a333330366461386166633233373133326237323663333831653232646566\n3363\n"
        }
    },
    "ansible_included_var_files": [
        "/tmp/metrics-pG6/tests/vars/vault-variables.yml"
    ],
    "changed": false
}

PLAY [Test BPFTrace with SASL auth] ********************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_bpftrace.yml:9
Monday 23 June 2025 03:01:44 -0400 (0:00:00.022) 0:00:00.042 ***********
[WARNING]: Platform linux on host managed-node3 is using the discovered Python
interpreter at /usr/bin/python3.9, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
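[NOTE]: A minimal sketch, assuming a standard YAML inventory (not part of this
test run), of pinning the discovered interpreter so this warning goes away and
the path cannot change under a later Python install:

    all:
      hosts:
        managed-node3:
          ansible_python_interpreter: /usr/bin/python3.9

[NOTE]: The vault-encrypted pcptest_pw fact included above is the kind of value
ansible-vault produces. A hypothetical reconstruction, assuming the plaintext
matches the saslpassword that appears later in this run:

    $ ansible-vault encrypt_string 't;dlen;dle' --name 'pcptest_pw'

which emits a YAML entry of the form stored in vault-variables.yml:

    pcptest_pw: !vault |
      $ANSIBLE_VAULT;1.1;AES256
      65343431623161346664373330646165636437656265656632613961363839303132393064663934
      ...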
ok: [managed-node3] TASK [Stop test] *************************************************************** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_bpftrace.yml:21 Monday 23 June 2025 03:01:45 -0400 (0:00:01.086) 0:00:01.129 *********** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [Get initial state of services] ******************************************* task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/get_services_state.yml:3 Monday 23 June 2025 03:01:45 -0400 (0:00:00.023) 0:00:01.153 *********** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "avahi-daemon.service": { "name": "avahi-daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": 
{ "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, 
"getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": 
"nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcp-reboot-init.service": { "name": "pcp-reboot-init.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pmcd.service": { "name": "pmcd.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmfind.service": { "name": "pmfind.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pmie.service": { "name": "pmie.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmie_check.service": { "name": "pmie_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_daily.service": { "name": "pmie_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_farm.service": { "name": "pmie_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmie_farm_check.service": { "name": "pmie_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger.service": { "name": "pmlogger.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmlogger_check.service": { "name": "pmlogger_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_daily.service": { "name": "pmlogger_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_farm.service": { "name": "pmlogger_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmlogger_farm_check.service": { "name": "pmlogger_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmproxy.service": { "name": "pmproxy.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, 
"quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "redis.service": { "name": "redis.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", 
"source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles.service": { "name": "systemd-tmpfiles.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": 
"user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "zabbix-agent.service": { "name": "zabbix-agent.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [Run the role] ************************************************************ task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_bpftrace.yml:31 Monday 23 June 2025 03:01:47 -0400 (0:00:01.786) 0:00:02.939 *********** included: fedora.linux_system_roles.metrics for managed-node3 TASK [fedora.linux_system_roles.metrics : Ensure ansible_facts used by role] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:3 Monday 23 June 2025 03:01:47 -0400 (0:00:00.039) 0:00:02.979 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "__metrics_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add Elasticsearch to metrics domain list] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:8 Monday 23 June 2025 03:01:47 -0400 (0:00:00.019) 0:00:02.998 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_elasticsearch | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add SQL Server to metrics domain list] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:13 Monday 23 June 2025 03:01:47 -0400 (0:00:00.016) 0:00:03.015 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_mssql | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add Postfix to metrics domain list] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:18 Monday 23 June 2025 03:01:47 -0400 (0:00:00.023) 0:00:03.038 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_postfix | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add bpftrace to metrics domain list] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:23 Monday 23 June 2025 03:01:47 -0400 (0:00:00.019) 0:00:03.057 *********** ok: [managed-node3] => { "ansible_facts": { "__metrics_domains": [ "bpftrace" ] }, "changed": false } TASK [fedora.linux_system_roles.metrics : Setup metrics access for roles] ****** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:28 Monday 23 June 2025 03:01:47 -0400 (0:00:00.021) 0:00:03.079 *********** ok: [managed-node3] => { "ansible_facts": { 
"__metrics_accounts": [ { "saslpassword": "t;dlen;dle", "sasluser": "pcptest", "user": "pcptest" } ] }, "changed": false } TASK [Configure Elasticsearch metrics] ***************************************** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:35 Monday 23 June 2025 03:01:47 -0400 (0:00:00.040) 0:00:03.120 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_elasticsearch | d(false) | bool or metrics_into_elasticsearch | d(false) | bool\n", "skip_reason": "Conditional result was False" } TASK [Configure Spark metrics] ************************************************* task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:50 Monday 23 June 2025 03:01:47 -0400 (0:00:00.018) 0:00:03.138 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_spark | d(false) | bool or metrics_into_spark | d(false) | bool\n", "skip_reason": "Conditional result was False" } TASK [Configure SQL Server metrics.] ******************************************* task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:62 Monday 23 June 2025 03:01:47 -0400 (0:00:00.016) 0:00:03.155 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_mssql | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Configure Postfix metrics.] ********************************************** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:70 Monday 23 June 2025 03:01:47 -0400 (0:00:00.016) 0:00:03.172 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_postfix | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Setup bpftrace metrics.] 
************************************************* task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:78 Monday 23 June 2025 03:01:47 -0400 (0:00:00.017) 0:00:03.189 *********** included: fedora.linux_system_roles.private_metrics_subrole_bpftrace for managed-node3 TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Set platform/version specific variables] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:4 Monday 23 June 2025 03:01:47 -0400 (0:00:00.050) 0:00:03.239 *********** ok: [managed-node3] => (item=/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/default.yml) => { "ansible_facts": { "bpftrace_metrics_provider": "pcp" }, "ansible_included_var_files": [ "/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/default.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/default.yml" } ok: [managed-node3] => (item=/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/RedHat.yml) => { "ansible_facts": {}, "ansible_included_var_files": [ "/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/RedHat.yml" } skipping: [managed-node3] => (item=/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item is file", "item": "/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS_9.yml) => { "ansible_facts": { "__bpftrace_packages": [ "bpftrace" ], "__bpftrace_packages_pcp": [ "pcp-pmda-bpftrace" ] }, "ansible_included_var_files": [ "/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS_9.yml" } ok: [managed-node3] => (item=/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS_9.yml) => { "ansible_facts": { "__bpftrace_packages": [ "bpftrace" ], "__bpftrace_packages_pcp": [ "pcp-pmda-bpftrace" ] }, "ansible_included_var_files": [ "/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/vars/CentOS_9.yml" } TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Check if system is ostree] *** task path: 
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:18 Monday 23 June 2025 03:01:47 -0400 (0:00:00.051) 0:00:03.291 *********** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Set flag to indicate system is ostree] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:23 Monday 23 June 2025 03:01:47 -0400 (0:00:00.500) 0:00:03.792 *********** ok: [managed-node3] => { "ansible_facts": { "__ansible_pcp_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Establish bpftrace package names] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:27 Monday 23 June 2025 03:01:48 -0400 (0:00:00.063) 0:00:03.856 *********** ok: [managed-node3] => { "ansible_facts": { "__bpftrace_packages_extra": [ "bpftrace" ] }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Establish bpftrace metrics package names] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:34 Monday 23 June 2025 03:01:48 -0400 (0:00:00.048) 0:00:03.905 *********** ok: [managed-node3] => { "ansible_facts": { "__bpftrace_packages_extra": [ "pcp-pmda-bpftrace", "bpftrace" ] }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Install needed bpftrace metrics packages] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:41 Monday 23 June 2025 03:01:48 -0400 (0:00:00.043) 0:00:03.949 *********** changed: [managed-node3] => { "changed": true, "rc": 0, "results": [ "Installed: libomp-20.1.3-1.el9.x86_64", "Installed: libomp-devel-20.1.3-1.el9.x86_64", "Installed: pcp-pmda-bpftrace-6.3.7-4.el9.x86_64", "Installed: tcl-1:8.6.10-7.el9.x86_64", "Installed: lldb-20.1.3-1.el9.x86_64", "Installed: clang-libs-20.1.3-1.el9.x86_64", "Installed: gcc-toolset-14-libstdc++-devel-14.2.1-10.el9.x86_64", "Installed: python3-pyelftools-0.27-4.el9.noarch", "Installed: policycoreutils-python-utils-3.6-3.el9.noarch", "Installed: clang-resource-filesystem-20.1.3-1.el9.x86_64", "Installed: environment-modules-5.3.0-2.el9.x86_64", "Installed: gcc-toolset-14-binutils-2.41-5.el9.x86_64", "Installed: python3-lldb-20.1.3-1.el9.x86_64", "Installed: gcc-toolset-14-gcc-14.2.1-10.el9.x86_64", "Installed: gcc-toolset-14-runtime-14.0-2.el9.x86_64", "Installed: python3-bcc-0.32.0-2.el9.noarch", "Installed: gcc-toolset-14-gcc-c++-14.2.1-10.el9.x86_64", "Installed: llvm-filesystem-20.1.3-1.el9.x86_64", "Installed: bpftrace-0.22.1-1.el9.x86_64", "Installed: python3-netaddr-0.10.1-3.el9.noarch", "Installed: scl-utils-1:2.0.3-4.el9.x86_64", "Installed: compiler-rt-20.1.3-1.el9.x86_64", "Installed: bcc-0.32.0-2.el9.x86_64", "Installed: libatomic-11.5.0-7.el9.x86_64", "Installed: bcc-tools-0.32.0-2.el9.x86_64", "Installed: llvm-libs-20.1.3-1.el9.x86_64" ] } TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Extract allowed bpftrace user accounts] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:48 Monday 23 June 2025 03:02:25 -0400 (0:00:37.084) 0:00:41.033 *********** ok: 
[managed-node3] => (item={'user': 'pcptest', 'sasluser': 'pcptest', 'saslpassword': 't;dlen;dle'}) => { "ansible_facts": { "__bpftrace_usernames": "root,pcptest" }, "ansible_loop_var": "item", "changed": false, "item": { "saslpassword": "t;dlen;dle", "sasluser": "pcptest", "user": "pcptest" } } TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Ensure PCP bpftrace configuration directory exists] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:54 Monday 23 June 2025 03:02:25 -0400 (0:00:00.056) 0:00:41.090 *********** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/pcp/bpftrace", "secontext": "system_u:object_r:etc_t:s0", "size": 44, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_bpftrace : Ensure PCP bpftrace agent is configured] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:61 Monday 23 June 2025 03:02:25 -0400 (0:00:00.703) 0:00:41.793 *********** changed: [managed-node3] => { "changed": true, "checksum": "9db3c77c06fbc635e71a256adf5840e978a74535", "dest": "/etc/pcp/bpftrace/bpftrace.conf", "gid": 0, "group": "root", "md5sum": "cba354200fbfe2a6c897f60d53f7d643", "mode": "0600", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 1048, "src": "/root/.ansible/tmp/ansible-tmp-1750662146.0718596-13023-220938206642182/.source.conf", "state": "file", "uid": 0 } TASK [Setup metric querying service.] ****************************************** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:87 Monday 23 June 2025 03:02:27 -0400 (0:00:01.165) 0:00:42.958 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_query_service | bool", "skip_reason": "Conditional result was False" } TASK [Setup metric collection service.] 
**************************************** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:93 Monday 23 June 2025 03:02:27 -0400 (0:00:00.016) 0:00:42.974 *********** included: fedora.linux_system_roles.private_metrics_subrole_pcp for managed-node3 TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set platform/version specific variables] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:4 Monday 23 June 2025 03:02:27 -0400 (0:00:00.053) 0:00:43.028 *********** ok: [managed-node3] => (item=/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/default.yml) => { "ansible_facts": {}, "ansible_included_var_files": [ "/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/default.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/default.yml" } ok: [managed-node3] => (item=/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/RedHat.yml) => { "ansible_facts": { "__pcp_pmcd_defaults_path": "/etc/sysconfig/pmcd", "__pcp_pmlogger_defaults_path": "/etc/sysconfig/pmlogger", "__pcp_pmlogger_timers_path": "/etc/sysconfig/pmlogger_timers", "__pcp_pmproxy_defaults_path": "/etc/sysconfig/pmproxy" }, "ansible_included_var_files": [ "/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/RedHat.yml" } skipping: [managed-node3] => (item=/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item is file", "item": "/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_9.yml) => { "ansible_facts": { "__pcp_packages_extra": [ "pcp-zeroconf" ], "__pcp_sasl_mechlist": "scram-sha-256", "__pcp_sasl_packages": [ "cyrus-sasl-lib", "cyrus-sasl-scram" ] }, "ansible_included_var_files": [ "/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_9.yml" } ok: [managed-node3] => (item=/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_9.yml) => { "ansible_facts": { "__pcp_packages_extra": [ "pcp-zeroconf" ], "__pcp_sasl_mechlist": "scram-sha-256", "__pcp_sasl_packages": [ "cyrus-sasl-lib", "cyrus-sasl-scram" ] }, "ansible_included_var_files": [ "/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": 
"/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_9.yml" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Check if system is ostree] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:18 Monday 23 June 2025 03:02:27 -0400 (0:00:00.049) 0:00:43.078 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "not __ansible_pcp_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set flag to indicate system is ostree] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:23 Monday 23 June 2025 03:02:27 -0400 (0:00:00.020) 0:00:43.099 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "not __ansible_pcp_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Install Performance Co-Pilot packages] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:27 Monday 23 June 2025 03:02:27 -0400 (0:00:00.020) 0:00:43.119 *********** ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Install authentication packages] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:33 Monday 23 June 2025 03:02:30 -0400 (0:00:03.326) 0:00:46.446 *********** ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Include pmcd] **** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:42 Monday 23 June 2025 03:02:32 -0400 (0:00:01.504) 0:00:47.950 *********** included: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml for managed-node3 TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : List optional metric collection agents to be enabled] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:4 Monday 23 June 2025 03:02:32 -0400 (0:00:00.044) 0:00:47.995 *********** ok: [managed-node3] => (item=bpftrace) => {} MSG: NeedInstall agent: bpftrace from ['bpftrace'] TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Extract metric collection configuration file content] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:9 Monday 23 June 2025 03:02:32 -0400 (0:00:00.040) 0:00:48.035 *********** ok: [managed-node3] => { "changed": false, "cmd": [ "cat", "/etc/pcp/pmcd/pmcd.conf" ], "delta": "0:00:00.003106", "end": "2025-06-23 03:02:32.801509", "rc": 0, "start": "2025-06-23 03:02:32.798403" } STDOUT: # Performance Metrics Domain Specifications # # This file is automatically generated during the build # Name Id IPC IPC Params File/Cmd root 1 pipe binary /var/lib/pcp/pmdas/root/pmdaroot pmcd 2 dso pmcd_init /var/lib/pcp/pmdas/pmcd/pmda_pmcd.so proc 3 pipe binary /var/lib/pcp/pmdas/proc/pmdaproc -d 3 xfs 11 pipe binary 
/var/lib/pcp/pmdas/xfs/pmdaxfs -d 11 linux 60 pipe binary /var/lib/pcp/pmdas/linux/pmdalinux pmproxy 4 dso pmproxy_init /var/lib/pcp/pmdas/mmv/pmda_mmv.so mmv 70 dso mmv_init /var/lib/pcp/pmdas/mmv/pmda_mmv.so jbd2 122 dso jbd2_init /var/lib/pcp/pmdas/jbd2/pmda_jbd2.so kvm 95 pipe binary /var/lib/pcp/pmdas/kvm/pmdakvm -d 95 [access] disallow ".*" : store; disallow ":*" : store; allow "local:*" : all; TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure optional metric collection agents are enabled] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:14 Monday 23 June 2025 03:02:32 -0400 (0:00:00.654) 0:00:48.690 *********** changed: [managed-node3] => (item=bpftrace) => { "ansible_loop_var": "item", "changed": true, "dest": "/var/lib/pcp/pmdas/bpftrace/.NeedInstall", "gid": 0, "group": "root", "item": "bpftrace", "mode": "0644", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 0, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure explicit metric label path exists] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:23 Monday 23 June 2025 03:02:33 -0400 (0:00:00.578) 0:00:49.269 *********** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/pcp/labels", "secontext": "system_u:object_r:etc_t:s0", "size": 45, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure implicit metric label path exists] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:31 Monday 23 June 2025 03:02:34 -0400 (0:00:00.557) 0:00:49.827 *********** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/pcp/labels/optional", "secontext": "system_u:object_r:etc_t:s0", "size": 29, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure any explicit metric labels are configured] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:39 Monday 23 June 2025 03:02:34 -0400 (0:00:00.580) 0:00:50.407 *********** ok: [managed-node3] => { "changed": false, "checksum": "5f36b2ea290645ee34d943220a14b54ee5ea5be5", "dest": "/etc/pcp/labels/ansible-managed", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/pcp/labels/ansible-managed", "secontext": "system_u:object_r:etc_t:s0", "size": 3, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure any implicit metric labels are configured] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:46 Monday 23 June 2025 03:02:35 -0400 (0:00:00.933) 0:00:51.341 *********** ok: [managed-node3] => { "changed": false, "checksum": "5f36b2ea290645ee34d943220a14b54ee5ea5be5", "dest": "/etc/pcp/labels/optional/ansible-managed", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/pcp/labels/optional/ansible-managed", "secontext": "system_u:object_r:etc_t:s0", "size": 3, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector is configured] *** task path: 
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:53 Monday 23 June 2025 03:02:36 -0400 (0:00:00.933) 0:00:52.274 *********** ok: [managed-node3] => { "changed": false, "checksum": "7518789c091387cd9c322e1a8fa8aad21d4efbd3", "dest": "/etc/sysconfig/pmcd", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/sysconfig/pmcd", "secontext": "system_u:object_r:etc_t:s0", "size": 1627, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector system accounts are configured] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:60 Monday 23 June 2025 03:02:37 -0400 (0:00:00.956) 0:00:53.230 *********** ok: [managed-node3] => (item={'user': 'pcptest', 'sasluser': 'pcptest', 'saslpassword': 't;dlen;dle'}) => { "ansible_loop_var": "item", "append": false, "changed": false, "comment": "", "group": 994, "home": "/home/pcptest", "item": { "saslpassword": "t;dlen;dle", "sasluser": "pcptest", "user": "pcptest" }, "move_home": false, "name": "pcptest", "shell": "/bin/bash", "state": "present", "uid": 994 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector SASL accounts are configured] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:68 Monday 23 June 2025 03:02:38 -0400 (0:00:00.755) 0:00:53.985 *********** ok: [managed-node3] => (item={'user': 'pcptest', 'sasluser': 'pcptest', 'saslpassword': 't;dlen;dle'}) => { "ansible_loop_var": "item", "changed": false, "cmd": "set -eu\nif set -o | grep -q pipefail; then\n set -o pipefail # pipefail not supported on debian, some ubuntu\nfi\nif ! 
sasldblistusers2 -f \"/etc/pcp/passwd.db\" | grep -q \"^pcptest@\"; then\n echo \"Creating new pcptest user in /etc/pcp/passwd.db\"\n echo \"t;dlen;dle\" | saslpasswd2 -a pmcd \"pcptest\"\n chown root:pcp \"/etc/pcp/passwd.db\"\n chmod 640 \"/etc/pcp/passwd.db\"\nfi\n", "delta": "0:00:00.008598", "end": "2025-06-23 03:02:38.675910", "item": { "saslpassword": "t;dlen;dle", "sasluser": "pcptest", "user": "pcptest" }, "rc": 0, "start": "2025-06-23 03:02:38.667312" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector authentication is configured] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:86 Monday 23 June 2025 03:02:38 -0400 (0:00:00.580) 0:00:54.565 *********** ok: [managed-node3] => { "changed": false, "checksum": "615d2de55ab86108da0c7e6b64988fecb4169771", "dest": "/etc/sasl2/pmcd.conf", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/sasl2/pmcd.conf", "secontext": "system_u:object_r:etc_t:s0", "size": 998, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set variable to do pmcd restart if needed] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:94 Monday 23 June 2025 03:02:39 -0400 (0:00:00.948) 0:00:55.514 *********** ok: [managed-node3] => { "ansible_facts": { "__pcp_restart_pmcd": true }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Report performance metric collector restart state] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:99 Monday 23 June 2025 03:02:39 -0400 (0:00:00.044) 0:00:55.559 *********** ok: [managed-node3] => {} MSG: ['optional_agents: True', 'explicit_labels: False', 'implicit_labels: False', 'defaults_config: False', 'authentication: False', 'restart_pmcd: True'] TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector is running and enabled on boot] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:110 Monday 23 June 2025 03:02:39 -0400 (0:00:00.049) 0:00:55.608 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "not __pcp_restart_pmcd | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector is restarted and enabled on boot] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:117 Monday 23 June 2025 03:02:39 -0400 (0:00:00.028) 0:00:55.636 *********** changed: [managed-node3] => { "changed": true, "enabled": true, "name": "pmcd", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Mon 2025-06-23 03:01:30 EDT", "ActiveEnterTimestampMonotonic": "314047747", "ActiveExitTimestamp": "Mon 2025-06-23 03:01:29 EDT", "ActiveExitTimestampMonotonic": "313236809", "ActiveState": "active", "After": "systemd-journald.socket system.slice network-online.target pcp-reboot-init.service basic.target avahi-daemon.service sysinit.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Mon 2025-06-23 03:01:30 EDT", "AssertTimestampMonotonic": "313917618", "Before": "pmie.service 
pmlogger.service multi-user.target pmproxy.service zabbix-agent.service shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "434257000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Mon 2025-06-23 03:01:30 EDT", "ConditionTimestampMonotonic": "313917614", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/pmcd.service", "ControlGroupId": "5064", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Collector Daemon", "DevicePolicy": "auto", "Documentation": "\"man:pmcd(1)\"", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "35682", "ExecMainStartTimestamp": "Mon 2025-06-23 03:01:30 EDT", "ExecMainStartTimestampMonotonic": "314047713", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd start-systemd ; ignore_errors=no ; start_time=[Mon 2025-06-23 03:01:30 EDT] ; stop_time=[n/a] ; pid=35608 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd start-systemd ; flags= ; start_time=[Mon 2025-06-23 03:01:30 EDT] ; stop_time=[n/a] ; pid=35608 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd stop-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd stop-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pmcd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pmcd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Mon 2025-06-23 03:01:30 
EDT", "InactiveEnterTimestampMonotonic": "313916860", "InactiveExitTimestamp": "Mon 2025-06-23 03:01:30 EDT", "InactiveExitTimestampMonotonic": "313923462", "InvocationID": "91c09c94a0fd410f9db4d73ff4a33337", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13688", "LimitNPROCSoft": "13688", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13688", "LimitSIGPENDINGSoft": "13688", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "35682", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "13332480", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pmcd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/pcp/pmcd.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Mon 2025-06-23 03:01:30 
EDT", "StateChangeTimestampMonotonic": "314047747", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "10", "TasksMax": "21900", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "pmie.service multi-user.target pmlogger.service", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Include pmie] **** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:45 Monday 23 June 2025 03:02:41 -0400 (0:00:01.982) 0:00:57.618 *********** included: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml for managed-node3 TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rule group directories exist] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:4 Monday 23 June 2025 03:02:41 -0400 (0:00:00.049) 0:00:57.668 *********** ok: [managed-node3] => (item=network) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "network", "mode": "0755", "owner": "root", "path": "/etc/pcp/pmieconf/network", "secontext": "system_u:object_r:etc_t:s0", "size": 78, "state": "directory", "uid": 0 } ok: [managed-node3] => (item=power) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "power", "mode": "0755", "owner": "root", "path": "/etc/pcp/pmieconf/power", "secontext": "system_u:object_r:etc_t:s0", "size": 30, "state": "directory", "uid": 0 } ok: [managed-node3] => (item=zeroconf) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "zeroconf", "mode": "0755", "owner": "root", "path": "/etc/pcp/pmieconf/zeroconf", "secontext": "system_u:object_r:etc_t:s0", "size": 25, "state": "directory", "uid": 0 } ok: [managed-node3] => (item=filesys) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "filesys", "mode": "0755", "owner": "root", "path": "/etc/pcp/pmieconf/filesys", "secontext": "system_u:object_r:etc_t:s0", "size": 38, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rule group link directories exist] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:14 Monday 23 June 2025 03:02:44 -0400 (0:00:02.302) 0:00:59.971 *********** ok: [managed-node3] => (item=network) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "network", "mode": "0755", "owner": "root", "path": "/var/lib/pcp/config/pmieconf/network", "secontext": "system_u:object_r:pcp_var_lib_t:s0", "size": 78, "state": "directory", "uid": 0 } ok: [managed-node3] 
=> (item=power) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "power", "mode": "0755", "owner": "root", "path": "/var/lib/pcp/config/pmieconf/power", "secontext": "system_u:object_r:pcp_var_lib_t:s0", "size": 30, "state": "directory", "uid": 0 } ok: [managed-node3] => (item=zeroconf) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "zeroconf", "mode": "0755", "owner": "root", "path": "/var/lib/pcp/config/pmieconf/zeroconf", "secontext": "system_u:object_r:pcp_var_lib_t:s0", "size": 25, "state": "directory", "uid": 0 } ok: [managed-node3] => (item=filesys) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "filesys", "mode": "0755", "owner": "root", "path": "/var/lib/pcp/config/pmieconf/filesys", "secontext": "system_u:object_r:pcp_var_lib_t:s0", "size": 38, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rules are installed for targeted hosts] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:24 Monday 23 June 2025 03:02:46 -0400 (0:00:02.300) 0:01:02.272 *********** ok: [managed-node3] => (item=network/tcplistenoverflows) => { "ansible_loop_var": "item", "changed": false, "checksum": "608d8a6ac6ee33bb86b77d28ba24fbcd378db43d", "dest": "/etc/pcp/pmieconf/network/tcplistenoverflows", "gid": 0, "group": "root", "item": "network/tcplistenoverflows", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/network/tcplistenoverflows", "secontext": "system_u:object_r:etc_t:s0", "size": 971, "state": "file", "uid": 0 } ok: [managed-node3] => (item=network/tcpqfulldocookies) => { "ansible_loop_var": "item", "changed": false, "checksum": "3256a5c2e8d07a20d8e97a08c0ab163252b0beae", "dest": "/etc/pcp/pmieconf/network/tcpqfulldocookies", "gid": 0, "group": "root", "item": "network/tcpqfulldocookies", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/network/tcpqfulldocookies", "secontext": "system_u:object_r:etc_t:s0", "size": 1131, "state": "file", "uid": 0 } ok: [managed-node3] => (item=network/tcpqfulldrops) => { "ansible_loop_var": "item", "changed": false, "checksum": "37b2bd7f2430bd9678ab078c5e69a53bea556524", "dest": "/etc/pcp/pmieconf/network/tcpqfulldrops", "gid": 0, "group": "root", "item": "network/tcpqfulldrops", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/network/tcpqfulldrops", "secontext": "system_u:object_r:etc_t:s0", "size": 1129, "state": "file", "uid": 0 } ok: [managed-node3] => (item=power/thermal_throttle) => { "ansible_loop_var": "item", "changed": false, "checksum": "1d53d6182709617c8f633339652d8d9e75f3b603", "dest": "/etc/pcp/pmieconf/power/thermal_throttle", "gid": 0, "group": "root", "item": "power/thermal_throttle", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/power/thermal_throttle", "secontext": "system_u:object_r:etc_t:s0", "size": 1153, "state": "file", "uid": 0 } ok: [managed-node3] => (item=zeroconf/all_threads) => { "ansible_loop_var": "item", "changed": false, "checksum": "65169db16dcaa224c211373001adc3addf1031c4", "dest": "/etc/pcp/pmieconf/zeroconf/all_threads", "gid": 0, "group": "root", "item": "zeroconf/all_threads", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/zeroconf/all_threads", "secontext": "system_u:object_r:etc_t:s0", "size": 840, "state": "file", "uid": 0 } ok: [managed-node3] => (item=filesys/vfs_files) => { "ansible_loop_var": 
"item", "changed": false, "checksum": "cd5d85dfb8eebd7d9737d56e78bd969dafa3999c", "dest": "/etc/pcp/pmieconf/filesys/vfs_files", "gid": 0, "group": "root", "item": "filesys/vfs_files", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/filesys/vfs_files", "secontext": "system_u:object_r:etc_t:s0", "size": 969, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance rule actions are installed for targeted hosts] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:34 Monday 23 June 2025 03:02:51 -0400 (0:00:05.443) 0:01:07.715 *********** ok: [managed-node3] => { "ansible_facts": { "local_pmie": "default" }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Check if global pmie webhook action is configured] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:38 Monday 23 June 2025 03:02:51 -0400 (0:00:00.022) 0:01:07.737 *********** skipping: [managed-node3] => (item=default) => { "ansible_loop_var": "item", "changed": false, "false_condition": "pcp_pmie_endpoint | length > 0", "item": "default", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Configure global webhook action] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:50 Monday 23 June 2025 03:02:51 -0400 (0:00:00.023) 0:01:07.761 *********** skipping: [managed-node3] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'pcp_pmie_endpoint | length > 0', 'item': 'default', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "pcp_pmie_endpoint | length > 0", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "pcp_pmie_endpoint | length > 0", "item": "default", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node3] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Check if global webhook endpoint is configured] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:63 Monday 23 June 2025 03:02:51 -0400 (0:00:00.026) 0:01:07.788 *********** ok: [managed-node3] => (item=default) => { "ansible_loop_var": "item", "backup": "", "changed": false, "found": 0, "item": "default" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Configure global webhook endpoint] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:73 Monday 23 June 2025 03:02:52 -0400 (0:00:00.695) 0:01:08.484 *********** skipping: [managed-node3] => (item={'changed': False, 'found': 0, 'msg': '', 'backup': '', 'diff': [{'before': '', 'after': '', 'before_header': '/var/lib/pcp/config/pmie/config.default (content)', 'after_header': '/var/lib/pcp/config/pmie/config.default (content)'}, {'before_header': '/var/lib/pcp/config/pmie/config.default (file attributes)', 'after_header': '/var/lib/pcp/config/pmie/config.default (file attributes)'}], 'invocation': {'module_args': {'state': 'absent', 
'path': '/var/lib/pcp/config/pmie/config.default', 'regexp': '//.*global webhook_endpoint = ""', 'backrefs': False, 'create': False, 'backup': False, 'firstmatch': False, 'unsafe_writes': False, 'search_string': None, 'line': None, 'insertafter': None, 'insertbefore': None, 'validate': None, 'mode': None, 'owner': None, 'group': None, 'seuser': None, 'serole': None, 'selevel': None, 'setype': None, 'attributes': None}}, 'failed': False, 'item': 'default', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "pcp_pmie_endpoint | length > 0", "item": { "ansible_loop_var": "item", "backup": "", "changed": false, "diff": [ { "after": "", "after_header": "/var/lib/pcp/config/pmie/config.default (content)", "before": "", "before_header": "/var/lib/pcp/config/pmie/config.default (content)" }, { "after_header": "/var/lib/pcp/config/pmie/config.default (file attributes)", "before_header": "/var/lib/pcp/config/pmie/config.default (file attributes)" } ], "failed": false, "found": 0, "invocation": { "module_args": { "attributes": null, "backrefs": false, "backup": false, "create": false, "firstmatch": false, "group": null, "insertafter": null, "insertbefore": null, "line": null, "mode": null, "owner": null, "path": "/var/lib/pcp/config/pmie/config.default", "regexp": "//.*global webhook_endpoint = \"\"", "search_string": null, "selevel": null, "serole": null, "setype": null, "seuser": null, "state": "absent", "unsafe_writes": false, "validate": null } }, "item": "default", "msg": "" }, "skip_reason": "Conditional result was False" } skipping: [managed-node3] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra rules symlinks have been created for targeted hosts] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:86 Monday 23 June 2025 03:02:52 -0400 (0:00:00.045) 0:01:08.529 *********** changed: [managed-node3] => (item=network/tcplistenoverflows) => { "ansible_loop_var": "item", "changed": true, "dest": "/var/lib/pcp/config/pmieconf/network/tcplistenoverflows", "gid": 0, "group": "root", "item": "network/tcplistenoverflows", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 44, "src": "/etc/pcp/pmieconf/network/tcplistenoverflows", "state": "link", "uid": 0 } changed: [managed-node3] => (item=network/tcpqfulldocookies) => { "ansible_loop_var": "item", "changed": true, "dest": "/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies", "gid": 0, "group": "root", "item": "network/tcpqfulldocookies", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 43, "src": "/etc/pcp/pmieconf/network/tcpqfulldocookies", "state": "link", "uid": 0 } changed: [managed-node3] => (item=network/tcpqfulldrops) => { "ansible_loop_var": "item", "changed": true, "dest": "/var/lib/pcp/config/pmieconf/network/tcpqfulldrops", "gid": 0, "group": "root", "item": "network/tcpqfulldrops", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 39, "src": "/etc/pcp/pmieconf/network/tcpqfulldrops", "state": "link", "uid": 0 } changed: [managed-node3] => (item=power/thermal_throttle) => { "ansible_loop_var": "item", "changed": true, "dest": "/var/lib/pcp/config/pmieconf/power/thermal_throttle", "gid": 0, "group": "root", "item": "power/thermal_throttle", "mode": "0777", "owner": "root", "secontext": 
"unconfined_u:object_r:pcp_var_lib_t:s0", "size": 40, "src": "/etc/pcp/pmieconf/power/thermal_throttle", "state": "link", "uid": 0 } changed: [managed-node3] => (item=zeroconf/all_threads) => { "ansible_loop_var": "item", "changed": true, "dest": "/var/lib/pcp/config/pmieconf/zeroconf/all_threads", "gid": 0, "group": "root", "item": "zeroconf/all_threads", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 38, "src": "/etc/pcp/pmieconf/zeroconf/all_threads", "state": "link", "uid": 0 } changed: [managed-node3] => (item=filesys/vfs_files) => { "ansible_loop_var": "item", "changed": true, "dest": "/var/lib/pcp/config/pmieconf/filesys/vfs_files", "gid": 0, "group": "root", "item": "filesys/vfs_files", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 35, "src": "/etc/pcp/pmieconf/filesys/vfs_files", "state": "link", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Enable performance metric inference for targeted hosts (with control.d)] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:95 Monday 23 June 2025 03:02:56 -0400 (0:00:03.363) 0:01:11.893 *********** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Enable performance metric inference for targeted hosts (single control)] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:106 Monday 23 June 2025 03:02:56 -0400 (0:00:00.018) 0:01:11.911 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "pcp_single_control | d(true) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set variable to do pmie restart if needed] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:116 Monday 23 June 2025 03:02:56 -0400 (0:00:00.019) 0:01:11.931 *********** ok: [managed-node3] => { "ansible_facts": { "__pcp_restart_pmie": true }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric inference is running and enabled on boot] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:120 Monday 23 June 2025 03:02:56 -0400 (0:00:00.025) 0:01:11.956 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "not __pcp_restart_pmie | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric inference is restarted and enabled on boot] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:127 Monday 23 June 2025 03:02:56 -0400 (0:00:00.017) 0:01:11.973 *********** changed: [managed-node3] => { "changed": true, "enabled": true, "name": "pmie", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Mon 2025-06-23 03:00:47 EDT", "ActiveEnterTimestampMonotonic": "271195226", "ActiveExitTimestamp": "Mon 2025-06-23 03:00:47 EDT", "ActiveExitTimestampMonotonic": "270717392", "ActiveState": "active", "After": "pcp-reboot-init.service network-online.target 
pmcd.service basic.target sysinit.target system.slice systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Mon 2025-06-23 03:00:47 EDT", "AssertTimestampMonotonic": "270968600", "Before": "shutdown.target pmie_farm.service pmie_check.timer multi-user.target pmie_daily.timer", "BindsTo": "pmie_farm.service pmie_check.timer pmie_daily.timer", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "321460000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Mon 2025-06-23 03:00:47 EDT", "ConditionTimestampMonotonic": "270968596", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pmie_farm.service", "ControlGroup": "/system.slice/pmie.service", "ControlGroupId": "4791", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Inference Engine", "DevicePolicy": "auto", "Documentation": "\"man:pmie(1)\"", "DynamicUser": "no", "Environment": "PMIE_CHECK_PARAMS=--only-primary", "EnvironmentFiles": "/etc/sysconfig/pmie (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "23646", "ExecMainStartTimestamp": "Mon 2025-06-23 03:00:47 EDT", "ExecMainStartTimestampMonotonic": "271195192", "ExecMainStatus": "0", "ExecStart": "{ path=/etc/pcp/pmie/rc ; argv[]=/etc/pcp/pmie/rc start-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/etc/pcp/pmie/rc ; argv[]=/etc/pcp/pmie/rc start-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/etc/pcp/pmie/rc ; argv[]=/etc/pcp/pmie/rc stop-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/etc/pcp/pmie/rc ; argv[]=/etc/pcp/pmie/rc stop-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pmie.service", "FreezerState": "running", "GID": "996", "Group": "pcp", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": 
"18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pmie.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Mon 2025-06-23 03:00:47 EDT", "InactiveEnterTimestampMonotonic": "270968050", "InactiveExitTimestamp": "Mon 2025-06-23 03:00:47 EDT", "InactiveExitTimestampMonotonic": "270973395", "InvocationID": "a27ef810dda54bee810b74fc2b6c399b", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13688", "LimitNPROCSoft": "13688", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13688", "LimitSIGPENDINGSoft": "13688", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "23646", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "1818624", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pmie.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/pcp/pmie.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": 
"inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Mon 2025-06-23 03:00:47 EDT", "StateChangeTimestampMonotonic": "271195226", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "21900", "TimeoutAbortUSec": "2min", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "2min", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "2min", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "996", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "User": "pcp", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "pmcd.service", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Include pmlogger] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:48 Monday 23 June 2025 03:02:57 -0400 (0:00:01.258) 0:01:13.232 *********** included: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml for managed-node3 TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure metric log location is configured] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:4 Monday 23 June 2025 03:02:57 -0400 (0:00:00.073) 0:01:13.305 *********** ok: [managed-node3] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric logging is configured] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:12 Monday 23 June 2025 03:02:58 -0400 (0:00:00.571) 0:01:13.877 *********** ok: [managed-node3] => { "changed": false, "checksum": "67bc35973101c614e92b1990f8bebfffc39fe498", "dest": "/etc/sysconfig/pmlogger", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/sysconfig/pmlogger", "secontext": "system_u:object_r:etc_t:s0", "size": 1180, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric logging retention period is set] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:19 Monday 23 June 2025 03:02:59 -0400 (0:00:01.010) 0:01:14.887 *********** ok: [managed-node3] => { "changed": false, "checksum": "df7bd3b5b6f1de3af164aab81441c7251a13a298", "dest": "/etc/sysconfig/pmlogger_timers", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/sysconfig/pmlogger_timers", "secontext": "system_u:object_r:etc_t:s0", "size": 988, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Enable performance metric logging for targeted hosts (with control.d)] *** task path: 
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:27 Monday 23 June 2025 03:03:00 -0400 (0:00:00.957) 0:01:15.845 *********** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Enable performance metric logging for targeted hosts (single control)] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:39 Monday 23 June 2025 03:03:00 -0400 (0:00:00.034) 0:01:15.879 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "pcp_single_control | d(true) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set variable to do pmlogger restart if needed] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:49 Monday 23 June 2025 03:03:00 -0400 (0:00:00.038) 0:01:15.918 *********** ok: [managed-node3] => { "ansible_facts": { "__pcp_restart_pmlogger": false }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric logging is running and enabled on boot] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:53 Monday 23 June 2025 03:03:00 -0400 (0:00:00.034) 0:01:15.952 *********** ok: [managed-node3] => { "changed": false, "enabled": true, "name": "pmlogger", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Mon 2025-06-23 03:00:53 EDT", "ActiveEnterTimestampMonotonic": "276533087", "ActiveExitTimestamp": "Mon 2025-06-23 03:00:52 EDT", "ActiveExitTimestampMonotonic": "275811394", "ActiveState": "active", "After": "systemd-journald.socket pmcd.service basic.target sysinit.target system.slice pcp-reboot-init.service network-online.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Mon 2025-06-23 03:00:52 EDT", "AssertTimestampMonotonic": "275889370", "Before": "multi-user.target pmlogger_daily.timer pmlogger_farm.service pmlogger_check.timer shutdown.target", "BindsTo": "pmlogger_daily.timer pmlogger_farm.service pmlogger_check.timer", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "1305571000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", 
"ConditionResult": "yes", "ConditionTimestamp": "Mon 2025-06-23 03:00:52 EDT", "ConditionTimestampMonotonic": "275889367", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pmlogger_farm.service", "ControlGroup": "/system.slice/pmlogger.service", "ControlGroupId": "4947", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Archive Logger", "DevicePolicy": "auto", "Documentation": "\"man:pmlogger(1)\"", "DynamicUser": "no", "Environment": "PMLOGGER_CHECK_PARAMS=--only-primary", "EnvironmentFiles": "/etc/sysconfig/pmlogger (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "25964", "ExecMainStartTimestamp": "Mon 2025-06-23 03:00:53 EDT", "ExecMainStartTimestampMonotonic": "276533053", "ExecMainStatus": "0", "ExecStart": "{ path=/etc/pcp/pmlogger/rc ; argv[]=/etc/pcp/pmlogger/rc start-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/etc/pcp/pmlogger/rc ; argv[]=/etc/pcp/pmlogger/rc start-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/etc/pcp/pmlogger/rc ; argv[]=/etc/pcp/pmlogger/rc stop-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/etc/pcp/pmlogger/rc ; argv[]=/etc/pcp/pmlogger/rc stop-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pmlogger.service", "FreezerState": "running", "GID": "996", "Group": "pcp", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pmlogger.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Mon 2025-06-23 03:00:52 EDT", "InactiveEnterTimestampMonotonic": "275888345", "InactiveExitTimestamp": "Mon 2025-06-23 03:00:52 EDT", "InactiveExitTimestampMonotonic": "275893367", "InvocationID": "124674f0665f4f1f96a11a28632b1038", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13688", "LimitNPROCSoft": "13688", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13688", 
"LimitSIGPENDINGSoft": "13688", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "25964", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "3710976", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pmlogger.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/pcp/pmlogger.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Mon 2025-06-23 03:00:53 EDT", "StateChangeTimestampMonotonic": "276533087", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "21900", "TimeoutAbortUSec": "2min", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "2min", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "2min", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "996", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "User": "pcp", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "pmcd.service", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric logging is restarted and enabled on boot] *** task path: 
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:60 Monday 23 June 2025 03:03:00 -0400 (0:00:00.723) 0:01:16.676 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "__pcp_restart_pmlogger | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Include pmproxy] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:51 Monday 23 June 2025 03:03:00 -0400 (0:00:00.023) 0:01:16.700 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "pcp_rest_api | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Setup metric graphing service.] ****************************************** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:106 Monday 23 June 2025 03:03:00 -0400 (0:00:00.025) 0:01:16.725 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_graph_service | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Configure firewall] ****************** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:114 Monday 23 June 2025 03:03:00 -0400 (0:00:00.017) 0:01:16.743 *********** included: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml for managed-node3 TASK [fedora.linux_system_roles.metrics : Initialize __metrics_firewall] ******* task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:9 Monday 23 June 2025 03:03:00 -0400 (0:00:00.037) 0:01:16.780 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Port for pmcd] *********************** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:13 Monday 23 June 2025 03:03:01 -0400 (0:00:00.018) 0:01:16.799 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Port for pmproxy used by query and grafana] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:19 Monday 23 June 2025 03:03:01 -0400 (0:00:00.017) 0:01:16.816 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Service for grafana] ***************** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:25 Monday 23 June 2025 03:03:01 -0400 (0:00:00.017) 0:01:16.833 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Service for valkey] ****************** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:31 Monday 23 June 2025 03:03:01 -0400 (0:00:00.016) 0:01:16.850 *********** skipping: 
[managed-node3] => { "changed": false, "false_condition": "metrics_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Service for redis] ******************* task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:38 Monday 23 June 2025 03:03:01 -0400 (0:00:00.016) 0:01:16.866 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [Ensure the service and the port status with the firewall role] *********** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:45 Monday 23 June 2025 03:03:01 -0400 (0:00:00.017) 0:01:16.884 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Configure selinux] ******************* task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:117 Monday 23 June 2025 03:03:01 -0400 (0:00:00.019) 0:01:16.904 *********** included: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/selinux.yml for managed-node3 TASK [fedora.linux_system_roles.metrics : Set pcp_bind_all_unreserved_ports] *** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/selinux.yml:6 Monday 23 June 2025 03:03:01 -0400 (0:00:00.042) 0:01:16.946 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_manage_selinux | bool", "skip_reason": "Conditional result was False" } TASK [Ensure the port status with the selinux role] **************************** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/selinux.yml:11 Monday 23 June 2025 03:03:01 -0400 (0:00:00.018) 0:01:16.965 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_manage_selinux | bool", "skip_reason": "Conditional result was False" } TASK [Check if BPFTrace & SASL works] ****************************************** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_bpftrace.yml:36 Monday 23 June 2025 03:03:01 -0400 (0:00:00.031) 0:01:16.996 *********** included: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/check_bpftrace.yml for managed-node3 => (item=check_bpftrace.yml) included: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/check_sasl.yml for managed-node3 => (item=check_sasl.yml) included: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/check_firewall_selinux.yml for managed-node3 => (item=check_firewall_selinux.yml) TASK [Check if bpftrace pmda is registered] ************************************ task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/check_bpftrace.yml:3 Monday 23 June 2025 03:03:01 -0400 (0:00:00.043) 0:01:17.040 *********** FAILED - RETRYING: [managed-node3]: Check if bpftrace pmda is registered (10 retries left). FAILED - RETRYING: [managed-node3]: Check if bpftrace pmda is registered (9 retries left). FAILED - RETRYING: [managed-node3]: Check if bpftrace pmda is registered (8 retries left). 
FAILED - RETRYING: [managed-node3]: Check if bpftrace pmda is registered (7 retries left). FAILED - RETRYING: [managed-node3]: Check if bpftrace pmda is registered (6 retries left). FAILED - RETRYING: [managed-node3]: Check if bpftrace pmda is registered (5 retries left). FAILED - RETRYING: [managed-node3]: Check if bpftrace pmda is registered (4 retries left). FAILED - RETRYING: [managed-node3]: Check if bpftrace pmda is registered (3 retries left). FAILED - RETRYING: [managed-node3]: Check if bpftrace pmda is registered (2 retries left). FAILED - RETRYING: [managed-node3]: Check if bpftrace pmda is registered (1 retries left). fatal: [managed-node3]: FAILED! => { "attempts": 10, "changed": false, "cmd": [ "pmprobe", "-I", "pmcd.agent.status" ], "delta": "0:00:00.008080", "end": "2025-06-23 03:03:18.213144", "rc": 0, "start": "2025-06-23 03:03:18.205064" } STDOUT: pmcd.agent.status 9 "root" "pmcd" "proc" "pmproxy" "xfs" "linux" "mmv" "kvm" "jbd2" TASK [Handle failure case] ***************************************************** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_bpftrace.yml:47 Monday 23 June 2025 03:03:18 -0400 (0:00:17.064) 0:01:34.104 *********** included: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml for managed-node3 TASK [Collect logs] ************************************************************ task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml:2 Monday 23 June 2025 03:03:18 -0400 (0:00:00.055) 0:01:34.160 *********** ok: [managed-node3] => { "changed": false, "cmd": "journalctl -ex\necho '##################'\necho List of SELinux AVCs - note list may be empty\ngrep type=AVC /var/log/audit/audit.log\necho '##################'\nls -alrtF /run\nif [ -d /run/pcp ]; then\n ls -alrtF /run/pcp\nelse\n echo ERROR - /run/pcp does not exist\nfi\n", "delta": "0:00:00.054348", "end": "2025-06-23 03:03:18.880145", "rc": 0, "start": "2025-06-23 03:03:18.825797" } STDOUT: Jun 23 02:56:23 localhost systemd[1]: Rebuild Hardware Database was skipped because of an unmet condition check (ConditionNeedsUpdate=/etc). Jun 23 02:56:23 localhost systemd[1]: Starting Load/Save OS Random Seed... Jun 23 02:56:23 localhost systemd[1]: Create System Users was skipped because no trigger condition checks were met. Jun 23 02:56:23 localhost systemd[1]: Starting Create Static Device Nodes in /dev... Jun 23 02:56:23 localhost systemd[1]: Started Journal Service. Jun 23 02:56:23 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@configfs.service has successfully entered the 'dead' state. Jun 23 02:56:23 localhost systemd[1]: Finished Load Kernel Module configfs. ░░ Subject: A start job for unit modprobe@configfs.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has finished successfully. ░░ ░░ The job identifier is 150. Jun 23 02:56:23 localhost systemd[1]: modprobe@efi_pstore.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@efi_pstore.service has successfully entered the 'dead' state. Jun 23 02:56:23 localhost systemd[1]: Finished Load Kernel Module efi_pstore. 
░░ Subject: A start job for unit modprobe@efi_pstore.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@efi_pstore.service has finished successfully. ░░ ░░ The job identifier is 152. Jun 23 02:56:23 localhost systemd[1]: Finished Coldplug All udev Devices. ░░ Subject: A start job for unit systemd-udev-trigger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udev-trigger.service has finished successfully. ░░ ░░ The job identifier is 167. Jun 23 02:56:23 localhost systemd[1]: Mounting Kernel Configuration File System... ░░ Subject: A start job for unit sys-kernel-config.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-kernel-config.mount has begun execution. ░░ ░░ The job identifier is 149. Jun 23 02:56:23 localhost systemd[1]: Starting Flush Journal to Persistent Storage... ░░ Subject: A start job for unit systemd-journal-flush.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-flush.service has begun execution. ░░ ░░ The job identifier is 168. Jun 23 02:56:23 localhost systemd[1]: Platform Persistent Storage Archival was skipped because of an unmet condition check (ConditionDirectoryNotEmpty=/sys/fs/pstore). ░░ Subject: A start job for unit systemd-pstore.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pstore.service has finished successfully. ░░ ░░ The job identifier is 151. Jun 23 02:56:23 localhost systemd[1]: Mounted Kernel Configuration File System. ░░ Subject: A start job for unit sys-kernel-config.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-kernel-config.mount has finished successfully. ░░ ░░ The job identifier is 149. Jun 23 02:56:23 localhost systemd[1]: Finished Load/Save OS Random Seed. ░░ Subject: A start job for unit systemd-random-seed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-random-seed.service has finished successfully. ░░ ░░ The job identifier is 133. Jun 23 02:56:23 localhost systemd[1]: First Boot Complete was skipped because of an unmet condition check (ConditionFirstBoot=yes). ░░ Subject: A start job for unit first-boot-complete.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit first-boot-complete.target has finished successfully. ░░ ░░ The job identifier is 134. Jun 23 02:56:23 localhost kernel: fuse: init (API version 7.37) Jun 23 02:56:23 localhost systemd-journald[499]: Runtime Journal (/run/log/journal/87ee650b3b634e21b69b31b9529a53cf) is 8.0M, max 69.3M, 61.3M free. ░░ Subject: Disk space used by the journal ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Runtime Journal (/run/log/journal/87ee650b3b634e21b69b31b9529a53cf) is currently using 8.0M. ░░ Maximum allowed usage is set to 69.3M. ░░ Leaving at least 34.6M free (of currently available 676.9M of disk space). ░░ Enforced usage limit is thus 69.3M, of which 61.3M are still available. 
░░ ░░ The limits controlling how much disk space is used by the journal may ░░ be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=, ░░ RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in ░░ /etc/systemd/journald.conf. See journald.conf(5) for details. Jun 23 02:56:23 localhost systemd-journald[499]: Received client request to flush runtime journal. Jun 23 02:56:23 localhost systemd[1]: Finished Flush Journal to Persistent Storage. ░░ Subject: A start job for unit systemd-journal-flush.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-flush.service has finished successfully. ░░ ░░ The job identifier is 168. Jun 23 02:56:23 localhost systemd[1]: modprobe@fuse.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@fuse.service has successfully entered the 'dead' state. Jun 23 02:56:23 localhost systemd[1]: Finished Load Kernel Module fuse. ░░ Subject: A start job for unit modprobe@fuse.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@fuse.service has finished successfully. ░░ ░░ The job identifier is 144. Jun 23 02:56:23 localhost kernel: ACPI: bus type drm_connector registered Jun 23 02:56:23 localhost systemd[1]: Mounting FUSE Control File System... ░░ Subject: A start job for unit sys-fs-fuse-connections.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-fs-fuse-connections.mount has begun execution. ░░ ░░ The job identifier is 143. Jun 23 02:56:23 localhost systemd[1]: modprobe@drm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@drm.service has successfully entered the 'dead' state. Jun 23 02:56:23 localhost systemd[1]: Finished Load Kernel Module drm. ░░ Subject: A start job for unit modprobe@drm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@drm.service has finished successfully. ░░ ░░ The job identifier is 202. Jun 23 02:56:23 localhost systemd[1]: Mounted FUSE Control File System. ░░ Subject: A start job for unit sys-fs-fuse-connections.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-fs-fuse-connections.mount has finished successfully. ░░ ░░ The job identifier is 143. Jun 23 02:56:24 localhost systemd[1]: Finished Create Static Device Nodes in /dev. ░░ Subject: A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully. ░░ ░░ The job identifier is 180. Jun 23 02:56:24 localhost systemd[1]: Reached target Preparation for Local File Systems. ░░ Subject: A start job for unit local-fs-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit local-fs-pre.target has finished successfully. ░░ ░░ The job identifier is 137. Jun 23 02:56:24 localhost systemd[1]: Reached target Local File Systems. 
░░ Subject: A start job for unit local-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit local-fs.target has finished successfully. ░░ ░░ The job identifier is 135. Jun 23 02:56:24 localhost systemd[1]: Rebuild Dynamic Linker Cache was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit ldconfig.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit ldconfig.service has finished successfully. ░░ ░░ The job identifier is 155. Jun 23 02:56:24 localhost systemd[1]: Mark the need to relabel after reboot was skipped because of an unmet condition check (ConditionSecurity=!selinux). ░░ Subject: A start job for unit selinux-autorelabel-mark.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit selinux-autorelabel-mark.service has finished successfully. ░░ ░░ The job identifier is 162. Jun 23 02:56:24 localhost systemd[1]: Set Up Additional Binary Formats was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-binfmt.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-binfmt.service has finished successfully. ░░ ░░ The job identifier is 142. Jun 23 02:56:24 localhost systemd[1]: Update Boot Loader Random Seed was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-boot-random-seed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-random-seed.service has finished successfully. ░░ ░░ The job identifier is 169. Jun 23 02:56:24 localhost systemd[1]: Starting Automatic Boot Loader Update... ░░ Subject: A start job for unit systemd-boot-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-update.service has begun execution. ░░ ░░ The job identifier is 154. Jun 23 02:56:24 localhost systemd[1]: Starting Commit a transient machine-id on disk... ░░ Subject: A start job for unit systemd-machine-id-commit.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-machine-id-commit.service has begun execution. ░░ ░░ The job identifier is 179. Jun 23 02:56:24 localhost systemd[1]: Starting Create Volatile Files and Directories... ░░ Subject: A start job for unit systemd-tmpfiles-setup.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup.service has begun execution. ░░ ░░ The job identifier is 158. Jun 23 02:56:24 localhost systemd[1]: Starting Rule-based Manager for Device Events and Files... ░░ Subject: A start job for unit systemd-udevd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udevd.service has begun execution. ░░ ░░ The job identifier is 126. Jun 23 02:56:24 localhost bootctl[514]: Couldn't find EFI system partition, skipping. Jun 23 02:56:24 localhost systemd[1]: Finished Automatic Boot Loader Update. 
░░ Subject: A start job for unit systemd-boot-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-update.service has finished successfully. ░░ ░░ The job identifier is 154. Jun 23 02:56:24 localhost systemd[1]: etc-machine\x2did.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit etc-machine\x2did.mount has successfully entered the 'dead' state. Jun 23 02:56:24 localhost systemd[1]: Finished Commit a transient machine-id on disk. ░░ Subject: A start job for unit systemd-machine-id-commit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-machine-id-commit.service has finished successfully. ░░ ░░ The job identifier is 179. Jun 23 02:56:24 localhost systemd-udevd[517]: Using default interface naming scheme 'rhel-9.0'. Jun 23 02:56:24 localhost systemd[1]: Finished Create Volatile Files and Directories. ░░ Subject: A start job for unit systemd-tmpfiles-setup.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup.service has finished successfully. ░░ ░░ The job identifier is 158. Jun 23 02:56:24 localhost systemd[1]: Mounting RPC Pipe File System... ░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution. ░░ ░░ The job identifier is 223. Jun 23 02:56:24 localhost systemd[1]: Starting Security Auditing Service... ░░ Subject: A start job for unit auditd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has begun execution. ░░ ░░ The job identifier is 200. Jun 23 02:56:24 localhost systemd[1]: Starting RPC Bind... ░░ Subject: A start job for unit rpcbind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpcbind.service has begun execution. ░░ ░░ The job identifier is 234. Jun 23 02:56:24 localhost systemd[1]: Rebuild Journal Catalog was skipped because of an unmet condition check (ConditionNeedsUpdate=/var). ░░ Subject: A start job for unit systemd-journal-catalog-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-catalog-update.service has finished successfully. ░░ ░░ The job identifier is 174. Jun 23 02:56:24 localhost systemd[1]: Update is Completed was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-update-done.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-done.service has finished successfully. ░░ ░░ The job identifier is 131. Jun 23 02:56:24 localhost auditd[523]: No plugins found, not dispatching events Jun 23 02:56:24 localhost auditd[523]: Init complete, auditd 3.1.5 listening for events (startup state enable) Jun 23 02:56:24 localhost kernel: RPC: Registered named UNIX socket transport module. Jun 23 02:56:24 localhost kernel: RPC: Registered udp transport module. Jun 23 02:56:24 localhost kernel: RPC: Registered tcp transport module. 
Jun 23 02:56:24 localhost kernel: RPC: Registered tcp-with-tls transport module. Jun 23 02:56:24 localhost kernel: RPC: Registered tcp NFSv4.1 backchannel transport module. Jun 23 02:56:24 localhost systemd[1]: Mounted RPC Pipe File System. ░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully. ░░ ░░ The job identifier is 223. Jun 23 02:56:24 localhost systemd[1]: Reached target rpc_pipefs.target. ░░ Subject: A start job for unit rpc_pipefs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc_pipefs.target has finished successfully. ░░ ░░ The job identifier is 222. Jun 23 02:56:24 localhost systemd[1]: Started RPC Bind. ░░ Subject: A start job for unit rpcbind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpcbind.service has finished successfully. ░░ ░░ The job identifier is 234. Jun 23 02:56:25 localhost systemd[1]: Started Rule-based Manager for Device Events and Files. ░░ Subject: A start job for unit systemd-udevd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udevd.service has finished successfully. ░░ ░░ The job identifier is 126. Jun 23 02:56:25 localhost systemd[1]: Starting Load Kernel Module configfs... ░░ Subject: A start job for unit modprobe@configfs.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has begun execution. ░░ ░░ The job identifier is 264. Jun 23 02:56:25 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@configfs.service has successfully entered the 'dead' state. Jun 23 02:56:25 localhost systemd[1]: Finished Load Kernel Module configfs. ░░ Subject: A start job for unit modprobe@configfs.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has finished successfully. ░░ ░░ The job identifier is 264. Jun 23 02:56:25 localhost systemd[1]: Condition check resulted in /dev/ttyS0 being skipped. ░░ Subject: A start job for unit dev-ttyS0.device has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dev-ttyS0.device has finished successfully. ░░ ░░ The job identifier is 248. 
Jun 23 02:56:25 localhost augenrules[526]: /sbin/augenrules: No change Jun 23 02:56:25 localhost kernel: input: PC Speaker as /devices/platform/pcspkr/input/input5 Jun 23 02:56:25 localhost kernel: piix4_smbus 0000:00:01.3: SMBus base address uninitialized - upgrade BIOS or use force_addr=0xaddr Jun 23 02:56:25 localhost augenrules[570]: No rules Jun 23 02:56:25 localhost augenrules[570]: enabled 1 Jun 23 02:56:25 localhost augenrules[570]: failure 1 Jun 23 02:56:25 localhost augenrules[570]: pid 523 Jun 23 02:56:25 localhost augenrules[570]: rate_limit 0 Jun 23 02:56:25 localhost augenrules[570]: backlog_limit 8192 Jun 23 02:56:25 localhost augenrules[570]: lost 0 Jun 23 02:56:25 localhost augenrules[570]: backlog 4 Jun 23 02:56:25 localhost augenrules[570]: backlog_wait_time 60000 Jun 23 02:56:25 localhost augenrules[570]: backlog_wait_time_actual 0 Jun 23 02:56:25 localhost augenrules[570]: enabled 1 Jun 23 02:56:25 localhost augenrules[570]: failure 1 Jun 23 02:56:25 localhost augenrules[570]: pid 523 Jun 23 02:56:25 localhost augenrules[570]: rate_limit 0 Jun 23 02:56:25 localhost augenrules[570]: backlog_limit 8192 Jun 23 02:56:25 localhost augenrules[570]: lost 0 Jun 23 02:56:25 localhost augenrules[570]: backlog 4 Jun 23 02:56:25 localhost augenrules[570]: backlog_wait_time 60000 Jun 23 02:56:25 localhost augenrules[570]: backlog_wait_time_actual 0 Jun 23 02:56:25 localhost augenrules[570]: enabled 1 Jun 23 02:56:25 localhost augenrules[570]: failure 1 Jun 23 02:56:25 localhost augenrules[570]: pid 523 Jun 23 02:56:25 localhost augenrules[570]: rate_limit 0 Jun 23 02:56:25 localhost augenrules[570]: backlog_limit 8192 Jun 23 02:56:25 localhost augenrules[570]: lost 0 Jun 23 02:56:25 localhost augenrules[570]: backlog 4 Jun 23 02:56:25 localhost augenrules[570]: backlog_wait_time 60000 Jun 23 02:56:25 localhost augenrules[570]: backlog_wait_time_actual 0 Jun 23 02:56:25 localhost systemd[1]: Started Security Auditing Service. ░░ Subject: A start job for unit auditd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has finished successfully. ░░ ░░ The job identifier is 200. Jun 23 02:56:25 localhost systemd[1]: Starting Record System Boot/Shutdown in UTMP... ░░ Subject: A start job for unit systemd-update-utmp.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp.service has begun execution. ░░ ░░ The job identifier is 212. Jun 23 02:56:25 localhost systemd-udevd[550]: Network interface NamePolicy= disabled on kernel command line. Jun 23 02:56:25 localhost systemd[1]: Finished Record System Boot/Shutdown in UTMP. ░░ Subject: A start job for unit systemd-update-utmp.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp.service has finished successfully. ░░ ░░ The job identifier is 212. Jun 23 02:56:25 localhost systemd[1]: Reached target System Initialization. ░░ Subject: A start job for unit sysinit.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sysinit.target has finished successfully. ░░ ░░ The job identifier is 117. Jun 23 02:56:25 localhost systemd[1]: Started dnf makecache --timer. 
░░ Subject: A start job for unit dnf-makecache.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dnf-makecache.timer has finished successfully. ░░ ░░ The job identifier is 185. Jun 23 02:56:25 localhost systemd[1]: Started Daily rotation of log files. ░░ Subject: A start job for unit logrotate.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.timer has finished successfully. ░░ ░░ The job identifier is 184. Jun 23 02:56:25 localhost systemd[1]: Started Daily Cleanup of Temporary Directories. ░░ Subject: A start job for unit systemd-tmpfiles-clean.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-clean.timer has finished successfully. ░░ ░░ The job identifier is 183. Jun 23 02:56:25 localhost systemd[1]: Reached target Timer Units. ░░ Subject: A start job for unit timers.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit timers.target has finished successfully. ░░ ░░ The job identifier is 182. Jun 23 02:56:25 localhost systemd[1]: Listening on D-Bus System Message Bus Socket. ░░ Subject: A start job for unit dbus.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus.socket has finished successfully. ░░ ░░ The job identifier is 190. Jun 23 02:56:25 localhost systemd[1]: Listening on SSSD Kerberos Cache Manager responder socket. ░░ Subject: A start job for unit sssd-kcm.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sssd-kcm.socket has finished successfully. ░░ ░░ The job identifier is 194. Jun 23 02:56:25 localhost systemd[1]: Reached target Socket Units. ░░ Subject: A start job for unit sockets.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sockets.target has finished successfully. ░░ ░░ The job identifier is 193. Jun 23 02:56:25 localhost kernel: cirrus 0000:00:02.0: vgaarb: deactivate vga console Jun 23 02:56:25 localhost kernel: Console: switching to colour dummy device 80x25 Jun 23 02:56:25 localhost kernel: RAPL PMU: API unit is 2^-32 Joules, 0 fixed counters, 655360 ms ovfl timer Jun 23 02:56:25 localhost kernel: [drm] Initialized cirrus 2.0.0 for 0000:00:02.0 on minor 0 Jun 23 02:56:25 localhost kernel: fbcon: cirrusdrmfb (fb0) is primary device Jun 23 02:56:25 localhost kernel: Console: switching to colour frame buffer device 128x48 Jun 23 02:56:25 localhost kernel: cirrus 0000:00:02.0: [drm] fb0: cirrusdrmfb frame buffer device Jun 23 02:56:25 localhost systemd[1]: Starting D-Bus System Message Bus... ░░ Subject: A start job for unit dbus-broker.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus-broker.service has begun execution. ░░ ░░ The job identifier is 189. Jun 23 02:56:25 localhost systemd[1]: TPM2 PCR Barrier (Initialization) was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f). 
░░ Subject: A start job for unit systemd-pcrphase-sysinit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pcrphase-sysinit.service has finished successfully. ░░ ░░ The job identifier is 173. Jun 23 02:56:25 localhost systemd[1]: Started D-Bus System Message Bus. ░░ Subject: A start job for unit dbus-broker.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus-broker.service has finished successfully. ░░ ░░ The job identifier is 189. Jun 23 02:56:25 localhost systemd[1]: Reached target Basic System. ░░ Subject: A start job for unit basic.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit basic.target has finished successfully. ░░ ░░ The job identifier is 116. Jun 23 02:56:25 localhost dbus-broker-lau[591]: Ready Jun 23 02:56:25 localhost systemd[1]: Starting NTP client/server... ░░ Subject: A start job for unit chronyd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit chronyd.service has begun execution. ░░ ░░ The job identifier is 231. Jun 23 02:56:25 localhost systemd[1]: Starting Cloud-init: Local Stage (pre-network)... ░░ Subject: A start job for unit cloud-init-local.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init-local.service has begun execution. ░░ ░░ The job identifier is 237. Jun 23 02:56:25 localhost systemd[1]: Starting Restore /run/initramfs on shutdown... ░░ Subject: A start job for unit dracut-shutdown.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-shutdown.service has begun execution. ░░ ░░ The job identifier is 163. Jun 23 02:56:25 localhost systemd[1]: Started irqbalance daemon. ░░ Subject: A start job for unit irqbalance.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit irqbalance.service has finished successfully. ░░ ░░ The job identifier is 230. Jun 23 02:56:25 localhost systemd[1]: Load CPU microcode update was skipped because of an unmet condition check (ConditionPathExists=/sys/devices/system/cpu/microcode/reload). ░░ Subject: A start job for unit microcode.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit microcode.service has finished successfully. ░░ ░░ The job identifier is 181. Jun 23 02:56:25 localhost systemd[1]: Started Hardware RNG Entropy Gatherer Daemon. ░░ Subject: A start job for unit rngd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rngd.service has finished successfully. ░░ ░░ The job identifier is 214. Jun 23 02:56:25 localhost systemd[1]: OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully. ░░ ░░ The job identifier is 209. 
Jun 23 02:56:25 localhost systemd[1]: OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ed25519.service has finished successfully. ░░ ░░ The job identifier is 210. Jun 23 02:56:25 localhost systemd[1]: OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@rsa.service has finished successfully. ░░ ░░ The job identifier is 207. Jun 23 02:56:25 localhost systemd[1]: Reached target sshd-keygen.target. ░░ Subject: A start job for unit sshd-keygen.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen.target has finished successfully. ░░ ░░ The job identifier is 206. Jun 23 02:56:25 localhost systemd[1]: System Security Services Daemon was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit sssd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sssd.service has finished successfully. ░░ ░░ The job identifier is 228. Jun 23 02:56:25 localhost systemd[1]: Reached target User and Group Name Lookups. ░░ Subject: A start job for unit nss-user-lookup.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nss-user-lookup.target has finished successfully. ░░ ░░ The job identifier is 229. Jun 23 02:56:25 localhost systemd[1]: Starting User Login Management... ░░ Subject: A start job for unit systemd-logind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has begun execution. ░░ ░░ The job identifier is 201. Jun 23 02:56:25 localhost systemd[1]: Starting Rotate log files... ░░ Subject: A start job for unit logrotate.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has begun execution. ░░ ░░ The job identifier is 268. Jun 23 02:56:25 localhost systemd[1]: Finished Restore /run/initramfs on shutdown. ░░ Subject: A start job for unit dracut-shutdown.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-shutdown.service has finished successfully. ░░ ░░ The job identifier is 163. Jun 23 02:56:25 localhost systemd-logind[606]: New seat seat0. ░░ Subject: A new seat seat0 is now available ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new seat seat0 has been configured and is now available. 
Jun 23 02:56:25 localhost systemd-logind[606]: Watching system buttons on /dev/input/event0 (Power Button) Jun 23 02:56:25 localhost systemd-logind[606]: Watching system buttons on /dev/input/event1 (Sleep Button) Jun 23 02:56:25 localhost systemd-logind[606]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard) Jun 23 02:56:25 localhost systemd[1]: Started User Login Management. ░░ Subject: A start job for unit systemd-logind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has finished successfully. ░░ ░░ The job identifier is 201. Jun 23 02:56:25 localhost systemd[1]: logrotate.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit logrotate.service has successfully entered the 'dead' state. Jun 23 02:56:25 localhost systemd[1]: Finished Rotate log files. ░░ Subject: A start job for unit logrotate.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has finished successfully. ░░ ░░ The job identifier is 268. Jun 23 02:56:25 localhost rngd[605]: Disabling 7: PKCS11 Entropy generator (pkcs11) Jun 23 02:56:25 localhost rngd[605]: Disabling 5: NIST Network Entropy Beacon (nist) Jun 23 02:56:25 localhost rngd[605]: Disabling 9: Qrypt quantum entropy beacon (qrypt) Jun 23 02:56:25 localhost rngd[605]: Disabling 10: Named pipe entropy input (namedpipe) Jun 23 02:56:25 localhost rngd[605]: Initializing available sources Jun 23 02:56:25 localhost rngd[605]: [hwrng ]: Initialization Failed Jun 23 02:56:25 localhost rngd[605]: [rdrand]: Enabling RDRAND rng support Jun 23 02:56:25 localhost rngd[605]: [rdrand]: Initialized Jun 23 02:56:25 localhost rngd[605]: [jitter]: JITTER timeout set to 5 sec Jun 23 02:56:25 localhost rngd[605]: [jitter]: Initializing AES buffer Jun 23 02:56:25 localhost chronyd[615]: chronyd version 4.6.1 starting (+CMDMON +NTP +REFCLOCK +RTC +PRIVDROP +SCFILTER +SIGND +ASYNCDNS +NTS +SECHASH +IPV6 +DEBUG) Jun 23 02:56:25 localhost chronyd[615]: Loaded 0 symmetric keys Jun 23 02:56:26 localhost chronyd[615]: Using right/UTC timezone to obtain leap second data Jun 23 02:56:26 localhost chronyd[615]: Frequency 0.000 +/- 1000000.000 ppm read from /var/lib/chrony/drift Jun 23 02:56:26 localhost chronyd[615]: Loaded seccomp filter (level 2) Jun 23 02:56:26 localhost systemd[1]: Started NTP client/server. ░░ Subject: A start job for unit chronyd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit chronyd.service has finished successfully. ░░ ░░ The job identifier is 231. Jun 23 02:56:28 localhost cloud-init[619]: Cloud-init v. 24.4-6.el9 running 'init-local' at Mon, 23 Jun 2025 06:56:28 +0000. Up 12.14 seconds. Jun 23 02:56:29 localhost dhclient[623]: Internet Systems Consortium DHCP Client 4.4.2b1 Jun 23 02:56:29 localhost dhclient[623]: Copyright 2004-2019 Internet Systems Consortium. Jun 23 02:56:29 localhost dhclient[623]: All rights reserved. 
Jun 23 02:56:29 localhost dhclient[623]: For info, please visit https://www.isc.org/software/dhcp/ Jun 23 02:56:29 localhost dhclient[623]: Jun 23 02:56:29 localhost dhclient[623]: Listening on LPF/eth0/0e:ba:18:13:79:93 Jun 23 02:56:29 localhost dhclient[623]: Sending on LPF/eth0/0e:ba:18:13:79:93 Jun 23 02:56:29 localhost dhclient[623]: Sending on Socket/fallback Jun 23 02:56:29 localhost dhclient[623]: DHCPDISCOVER on eth0 to 255.255.255.255 port 67 interval 7 (xid=0x1413cf47) Jun 23 02:56:29 localhost dhclient[623]: DHCPOFFER of 10.31.40.4 from 10.31.40.1 Jun 23 02:56:29 localhost dhclient[623]: DHCPREQUEST for 10.31.40.4 on eth0 to 255.255.255.255 port 67 (xid=0x1413cf47) Jun 23 02:56:29 localhost dhclient[623]: DHCPACK of 10.31.40.4 from 10.31.40.1 (xid=0x1413cf47) Jun 23 02:56:29 localhost dhclient[623]: bound to 10.31.40.4 -- renewal in 1583 seconds. Jun 23 02:56:29 localhost systemd[1]: Starting Hostname Service... ░░ Subject: A start job for unit systemd-hostnamed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has begun execution. ░░ ░░ The job identifier is 333. Jun 23 02:56:29 localhost systemd[1]: Started Hostname Service. ░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has finished successfully. ░░ ░░ The job identifier is 333. Jun 23 02:56:29 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd-hostnamed[638]: Hostname set to (static) Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com rngd[605]: [jitter]: Unable to obtain AES key, disabling JITTER source Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com rngd[605]: [jitter]: Initialization Failed Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com rngd[605]: Process privileges have been dropped to 2:2 Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Finished Cloud-init: Local Stage (pre-network). ░░ Subject: A start job for unit cloud-init-local.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init-local.service has finished successfully. ░░ ░░ The job identifier is 237. Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Preparation for Network. ░░ Subject: A start job for unit network-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network-pre.target has finished successfully. ░░ ░░ The job identifier is 124. Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager... ░░ Subject: A start job for unit NetworkManager.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager.service has begun execution. ░░ ░░ The job identifier is 188. Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.1846] NetworkManager (version 1.53.4-1.el9) is starting... 
(boot:04981b48-ac4c-4de6-af48-8c14214b666d) Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.1849] Read config: /etc/NetworkManager/NetworkManager.conf, /run/NetworkManager/conf.d/15-carrier-timeout.conf Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.1942] manager[0x55c2b5756080]: monitoring kernel firmware directory '/lib/firmware'. Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.1972] hostname: hostname: using hostnamed Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.1973] hostname: static hostname changed from (none) to "ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com" Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.1977] dns-mgr: init: dns=default,systemd-resolved rc-manager=symlink (auto) Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.2431] manager[0x55c2b5756080]: rfkill: Wi-Fi hardware radio set enabled Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.2432] manager[0x55c2b5756080]: rfkill: WWAN hardware radio set enabled Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.2530] Loaded device plugin: NMTeamFactory (/usr/lib64/NetworkManager/1.53.4-1.el9/libnm-device-plugin-team.so) Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.2532] manager: rfkill: Wi-Fi enabled by radio killswitch; enabled by state file Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.2533] manager: rfkill: WWAN enabled by radio killswitch; enabled by state file Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.2534] manager: Networking is enabled by state file Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.2543] settings: Loaded settings plugin: keyfile (internal) Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 400. Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Listening on Load/Save RF Kill Switch Status /dev/rfkill Watch. ░░ Subject: A start job for unit systemd-rfkill.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-rfkill.socket has finished successfully. ░░ ░░ The job identifier is 466. 
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.2847] settings: Loaded settings plugin: ifcfg-rh ("/usr/lib64/NetworkManager/1.53.4-1.el9/libnm-settings-plugin-ifcfg-rh.so") Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.2884] Warning: the ifcfg-rh plugin is deprecated, please migrate connections to the keyfile format using "nmcli connection migrate" Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.2903] dhcp: init: Using DHCP client 'internal' Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.2907] manager: (lo): new Loopback device (/org/freedesktop/NetworkManager/Devices/1) Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.2921] device (lo): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.2932] device (lo): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.2943] device (lo): Activation: starting connection 'lo' (56186a50-a32b-4e3d-a835-f4d45203b26f) Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.2951] manager: (eth0): new Ethernet device (/org/freedesktop/NetworkManager/Devices/2) Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.2957] device (eth0): state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external') Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Network Manager. ░░ Subject: A start job for unit NetworkManager.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager.service has finished successfully. ░░ ░░ The job identifier is 188. 
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.2985] bus-manager: acquired D-Bus service "org.freedesktop.NetworkManager" Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.2998] device (lo): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3001] device (lo): state change: prepare -> config (reason 'none', managed-type: 'external') Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3003] device (lo): state change: config -> ip-config (reason 'none', managed-type: 'external') Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3004] device (eth0): carrier: link connected Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3006] device (lo): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3012] device (eth0): state change: unavailable -> disconnected (reason 'carrier-changed', managed-type: 'full') Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3016] policy: auto-activating connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03) Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Network. ░░ Subject: A start job for unit network.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network.target has finished successfully. ░░ ░░ The job identifier is 191. Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3030] device (eth0): Activation: starting connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03) Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3032] device (eth0): state change: disconnected -> prepare (reason 'none', managed-type: 'full') Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3034] manager: NetworkManager state is now CONNECTING Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3035] device (eth0): state change: prepare -> config (reason 'none', managed-type: 'full') Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3040] device (eth0): state change: config -> ip-config (reason 'none', managed-type: 'full') Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3042] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds) Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3054] dhcp4 (eth0): state changed new lease, address=10.31.40.4 Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3073] policy: set 'System eth0' (eth0) as default for IPv4 routing and DNS Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager Wait Online... 
░░ Subject: A start job for unit NetworkManager-wait-online.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-wait-online.service has begun execution. ░░ ░░ The job identifier is 187. Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting GSSAPI Proxy Daemon... ░░ Subject: A start job for unit gssproxy.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit gssproxy.service has begun execution. ░░ ░░ The job identifier is 225. Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 400. Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3202] device (eth0): state change: ip-config -> ip-check (reason 'none', managed-type: 'full') Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3216] device (lo): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3219] device (lo): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3225] device (lo): Activation: successful, device activated. Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started GSSAPI Proxy Daemon. ░░ Subject: A start job for unit gssproxy.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit gssproxy.service has finished successfully. ░░ ░░ The job identifier is 225. Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: RPC security service for NFS client and server was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab). ░░ Subject: A start job for unit rpc-gssd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-gssd.service has finished successfully. ░░ ░░ The job identifier is 221. Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target NFS client services. ░░ Subject: A start job for unit nfs-client.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nfs-client.target has finished successfully. ░░ ░░ The job identifier is 217. Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Preparation for Remote File Systems. ░░ Subject: A start job for unit remote-fs-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs-pre.target has finished successfully. ░░ ░░ The job identifier is 219. Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Remote File Systems. 
░░ Subject: A start job for unit remote-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs.target has finished successfully. ░░ ░░ The job identifier is 216. Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: TPM2 PCR Barrier (User) was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f). ░░ Subject: A start job for unit systemd-pcrphase.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pcrphase.service has finished successfully. ░░ ░░ The job identifier is 157. Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3552] device (eth0): state change: ip-check -> secondaries (reason 'none', managed-type: 'full') Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3558] device (eth0): state change: secondaries -> activated (reason 'none', managed-type: 'full') Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3566] manager: NetworkManager state is now CONNECTED_SITE Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3573] device (eth0): Activation: successful, device activated. Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3582] manager: NetworkManager state is now CONNECTED_GLOBAL Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com NetworkManager[642]: [1750661790.3593] manager: startup complete Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Finished Network Manager Wait Online. ░░ Subject: A start job for unit NetworkManager-wait-online.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-wait-online.service has finished successfully. ░░ ░░ The job identifier is 187. Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Cloud-init: Network Stage... ░░ Subject: A start job for unit cloud-init.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.service has begun execution. ░░ ░░ The job identifier is 236. Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com chronyd[615]: Added source 10.11.160.238 Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com chronyd[615]: Added source 10.18.100.10 Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com chronyd[615]: Added source 10.2.32.37 Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com chronyd[615]: Added source 10.2.32.38 Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: Cloud-init v. 24.4-6.el9 running 'init' at Mon, 23 Jun 2025 06:56:30 +0000. Up 13.99 seconds. 
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: ++++++++++++++++++++++++++++++++++++++Net device info+++++++++++++++++++++++++++++++++++++++
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: | Device | Up | Address | Mask | Scope | Hw-Address |
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: | eth0 | True | 10.31.40.4 | 255.255.252.0 | global | 0e:ba:18:13:79:93 |
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: | eth0 | True | fe80::cba:18ff:fe13:7993/64 | . | link | 0e:ba:18:13:79:93 |
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: | lo | True | 127.0.0.1 | 255.0.0.0 | host | . |
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: | lo | True | ::1/128 | . | host | . |
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: ++++++++++++++++++++++++++++Route IPv4 info+++++++++++++++++++++++++++++
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: | Route | Destination | Gateway | Genmask | Interface | Flags |
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: | 0 | 0.0.0.0 | 10.31.40.1 | 0.0.0.0 | eth0 | UG |
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: | 1 | 10.31.40.0 | 0.0.0.0 | 255.255.252.0 | eth0 | U |
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: +++++++++++++++++++Route IPv6 info+++++++++++++++++++
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: +-------+-------------+---------+-----------+-------+
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: | Route | Destination | Gateway | Interface | Flags |
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: +-------+-------------+---------+-----------+-------+
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: | 1 | fe80::/64 | :: | eth0 | U |
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: | 3 | multicast | :: | eth0 | U |
Jun 23 02:56:30 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: ci-info: +-------+-------------+---------+-----------+-------+
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: Generating public/private rsa key pair.
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: Your identification has been saved in /etc/ssh/ssh_host_rsa_key
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: Your public key has been saved in /etc/ssh/ssh_host_rsa_key.pub
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: The key fingerprint is:
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: SHA256:cLz87+2HIUOOG8sVHBeTHXPYM8NVZdizpkcnk754ocY root@ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: The key's randomart image is:
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: +---[RSA 3072]----+
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | +@@|
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | . . =O*|
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | . o . o .*|
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | + . + ++.|
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | S + o+o.|
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | .o =.+. |
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | ..* =.= |
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | +.E.+ .|
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | oooo. |
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: +----[SHA256]-----+
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: Generating public/private ecdsa key pair.
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: Your identification has been saved in /etc/ssh/ssh_host_ecdsa_key
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: Your public key has been saved in /etc/ssh/ssh_host_ecdsa_key.pub
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: The key fingerprint is:
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: SHA256:Ezw+QozO2Ot1VVezhrBA6grOHDAseqY3wpezBsz0LT8 root@ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: The key's randomart image is:
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: +---[ECDSA 256]---+
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | .o . ..|
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: |. o .. . o ..o|
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: |.+ . o.+ ....o |
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: |o.o= ... o . .. |
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: |= =o= ..S . |
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: |.B+o+o.. + |
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: |o.+B+.. . |
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | o.+oE . |
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | ... . |
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: +----[SHA256]-----+
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: Generating public/private ed25519 key pair.
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: Your identification has been saved in /etc/ssh/ssh_host_ed25519_key
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: Your public key has been saved in /etc/ssh/ssh_host_ed25519_key.pub
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: The key fingerprint is:
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: SHA256:UZHSP1MGuPPBXsyL4jF9rduHKqa13NE1uoD0VnR6m+M root@ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: The key's randomart image is:
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: +--[ED25519 256]--+
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | .o+.. |
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | ..+ o |
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | .. + * . |
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | .o B * |
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | S. = O =.|
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | . * *.=.=|
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | o.B.o.* |
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | o=o..=.o|
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: | .oo.oo.Eo|
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[733]: +----[SHA256]-----+
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Finished Cloud-init: Network Stage. ░░ Subject: A start job for unit cloud-init.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.service has finished successfully. ░░ ░░ The job identifier is 236.
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Cloud-config availability. ░░ Subject: A start job for unit cloud-config.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.target has finished successfully. ░░ ░░ The job identifier is 240.
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Network is Online. ░░ Subject: A start job for unit network-online.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network-online.target has finished successfully. ░░ ░░ The job identifier is 186.
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Cloud-init: Config Stage... ░░ Subject: A start job for unit cloud-config.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.service has begun execution. ░░ ░░ The job identifier is 239.
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Crash recovery kernel arming... ░░ Subject: A start job for unit kdump.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kdump.service has begun execution. ░░ ░░ The job identifier is 204.
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting The restraint harness.... ░░ Subject: A start job for unit restraintd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit restraintd.service has begun execution. ░░ ░░ The job identifier is 227.
Jun 23 02:56:31 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Notify NFS peers of a restart... ░░ Subject: A start job for unit rpc-statd-notify.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-statd-notify.service has begun execution. ░░ ░░ The job identifier is 218.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting System Logging Service... ░░ Subject: A start job for unit rsyslog.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rsyslog.service has begun execution. ░░ ░░ The job identifier is 226.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting OpenSSH server daemon... ░░ Subject: A start job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 205.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com sm-notify[808]: Version 2.5.4 starting
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Permit User Sessions... ░░ Subject: A start job for unit systemd-user-sessions.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-user-sessions.service has begun execution. ░░ ░░ The job identifier is 215.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started The restraint harness.. ░░ Subject: A start job for unit restraintd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit restraintd.service has finished successfully. ░░ ░░ The job identifier is 227.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Notify NFS peers of a restart. ░░ Subject: A start job for unit rpc-statd-notify.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-statd-notify.service has finished successfully. ░░ ░░ The job identifier is 218.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com sshd[810]: Server listening on 0.0.0.0 port 22.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com sshd[810]: Server listening on :: port 22.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started OpenSSH server daemon. ░░ Subject: A start job for unit sshd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has finished successfully. ░░ ░░ The job identifier is 205.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Finished Permit User Sessions. ░░ Subject: A start job for unit systemd-user-sessions.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-user-sessions.service has finished successfully. ░░ ░░ The job identifier is 215.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Command Scheduler. ░░ Subject: A start job for unit crond.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit crond.service has finished successfully. ░░ ░░ The job identifier is 213.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Getty on tty1. ░░ Subject: A start job for unit getty@tty1.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit getty@tty1.service has finished successfully. ░░ ░░ The job identifier is 243.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com crond[815]: (CRON) STARTUP (1.5.7)
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com crond[815]: (CRON) INFO (Syslog will be used instead of sendmail.)
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com crond[815]: (CRON) INFO (RANDOM_DELAY will be scaled with factor 4% if used.)
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com crond[815]: (CRON) INFO (running with inotify support)
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Serial Getty on ttyS0. ░░ Subject: A start job for unit serial-getty@ttyS0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit serial-getty@ttyS0.service has finished successfully. ░░ ░░ The job identifier is 247.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Login Prompts. ░░ Subject: A start job for unit getty.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit getty.target has finished successfully. ░░ ░░ The job identifier is 242.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com rsyslogd[809]: [origin software="rsyslogd" swVersion="8.2412.0-2.el9" x-pid="809" x-info="https://www.rsyslog.com"] start
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started System Logging Service. ░░ Subject: A start job for unit rsyslog.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rsyslog.service has finished successfully. ░░ ░░ The job identifier is 226.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Multi-User System. ░░ Subject: A start job for unit multi-user.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit multi-user.target has finished successfully. ░░ ░░ The job identifier is 115.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Record Runlevel Change in UTMP... ░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp-runlevel.service has begun execution. ░░ ░░ The job identifier is 211.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: systemd-update-utmp-runlevel.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-update-utmp-runlevel.service has successfully entered the 'dead' state.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Finished Record Runlevel Change in UTMP. ░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp-runlevel.service has finished successfully. ░░ ░░ The job identifier is 211.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com rsyslogd[809]: imjournal: journal files changed, reloading... [v8.2412.0-2.el9 try https://www.rsyslog.com/e/0 ]
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[851]: Cloud-init v. 24.4-6.el9 running 'modules:config' at Mon, 23 Jun 2025 06:56:32 +0000. Up 15.55 seconds.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Stopping OpenSSH server daemon... ░░ Subject: A stop job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 487.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com sshd[810]: Received signal 15; terminating.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: sshd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit sshd.service has successfully entered the 'dead' state.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Stopped OpenSSH server daemon. ░░ Subject: A stop job for unit sshd.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd.service has finished. ░░ ░░ The job identifier is 487 and the job result is done.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting OpenSSH server daemon... ░░ Subject: A start job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 487.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com sshd[873]: Server listening on 0.0.0.0 port 22.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com sshd[873]: Server listening on :: port 22.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started OpenSSH server daemon. ░░ Subject: A start job for unit sshd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has finished successfully. ░░ ░░ The job identifier is 487.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Finished Cloud-init: Config Stage. ░░ Subject: A start job for unit cloud-config.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.service has finished successfully. ░░ ░░ The job identifier is 239.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Cloud-init: Final Stage... ░░ Subject: A start job for unit cloud-final.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-final.service has begun execution. ░░ ░░ The job identifier is 238.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com restraintd[814]: Listening on http://localhost:8081
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com kdumpctl[818]: kdump: Detected change(s) in the following file(s): /etc/fstab
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[1101]: Cloud-init v. 24.4-6.el9 running 'modules:final' at Mon, 23 Jun 2025 06:56:32 +0000. Up 16.08 seconds.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[1107]: #############################################################
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[1108]: -----BEGIN SSH HOST KEY FINGERPRINTS-----
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[1110]: 256 SHA256:Ezw+QozO2Ot1VVezhrBA6grOHDAseqY3wpezBsz0LT8 root@ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com (ECDSA)
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[1112]: 256 SHA256:UZHSP1MGuPPBXsyL4jF9rduHKqa13NE1uoD0VnR6m+M root@ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com (ED25519)
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[1114]: 3072 SHA256:cLz87+2HIUOOG8sVHBeTHXPYM8NVZdizpkcnk754ocY root@ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com (RSA)
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[1115]: -----END SSH HOST KEY FINGERPRINTS-----
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[1116]: #############################################################
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com cloud-init[1101]: Cloud-init v. 24.4-6.el9 finished at Mon, 23 Jun 2025 06:56:32 +0000. Datasource DataSourceEc2Local. Up 16.20 seconds
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Finished Cloud-init: Final Stage. ░░ Subject: A start job for unit cloud-final.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-final.service has finished successfully. ░░ ░░ The job identifier is 238.
Jun 23 02:56:32 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Cloud-init target. ░░ Subject: A start job for unit cloud-init.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.target has finished successfully. ░░ ░░ The job identifier is 235.
Jun 23 02:56:33 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com kernel: block xvda: the capability attribute has been deprecated.
Jun 23 02:56:33 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com kdumpctl[818]: kdump: Rebuilding /boot/initramfs-5.14.0-587.el9.x86_64kdump.img
Jun 23 02:56:33 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1462]: dracut-057-87.git20250311.el9
Jun 23 02:56:33 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: Executing: /usr/bin/dracut --quiet --hostonly --hostonly-cmdline --hostonly-i18n --hostonly-mode strict --hostonly-nics --mount "/dev/disk/by-uuid/5295750c-4ddc-4d83-9af4-ea4188bf87ce /sysroot xfs rw,relatime,seclabel,attr2,inode64,logbufs=8,logbsize=32k,noquota" --squash-compressor zstd --no-hostonly-default-device --add-confdir /lib/kdump/dracut.conf.d -f /boot/initramfs-5.14.0-587.el9.x86_64kdump.img 5.14.0-587.el9.x86_64
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'systemd-networkd' will not be installed, because command 'networkctl' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd-wait-online' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'systemd-resolved' will not be installed, because command 'resolvectl' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'busybox' will not be installed, because command 'busybox' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'connman' will not be installed, because command 'connmand' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'connman' will not be installed, because command 'connmanctl' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'connman' will not be installed, because command 'connmand-wait-online' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'network-wicked' will not be installed, because command 'wicked' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: Module 'ifcfg' will not be installed, because it's in the list to be omitted!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: Module 'plymouth' will not be installed, because it's in the list to be omitted!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'lvmmerge' will not be installed, because command 'lvm' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'lvmthinpool-monitor' will not be installed, because command 'lvm' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'btrfs' will not be installed, because command 'btrfs' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'dmraid' will not be installed, because command 'dmraid' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'lvm' will not be installed, because command 'lvm' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'mdraid' will not be installed, because command 'mdadm' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'pcsc' will not be installed, because command 'pcscd' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'tpm2-tss' will not be installed, because command 'tpm2' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'cifs' will not be installed, because command 'mount.cifs' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'iscsi' will not be installed, because command 'iscsiadm' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'iscsi' will not be installed, because command 'iscsid' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'nvmf' will not be installed, because command 'nvme' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: Module 'resume' will not be installed, because it's in the list to be omitted!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'biosdevname' will not be installed, because command 'biosdevname' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: Module 'earlykdump' will not be installed, because it's in the list to be omitted!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'memstrack' will not be installed, because command 'memstrack' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: memstrack is not available
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: If you need to use rd.memdebug>=4, please install memstrack and procps-ng
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'systemd-resolved' will not be installed, because command 'resolvectl' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'busybox' will not be installed, because command 'busybox' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'connman' will not be installed, because command 'connmand' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'connman' will not be installed, because command 'connmanctl' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'connman' will not be installed, because command 'connmand-wait-online' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'network-wicked' will not be installed, because command 'wicked' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'lvmmerge' will not be installed, because command 'lvm' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'lvmthinpool-monitor' will not be installed, because command 'lvm' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'btrfs' will not be installed, because command 'btrfs' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'dmraid' will not be installed, because command 'dmraid' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'lvm' will not be installed, because command 'lvm' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'mdraid' will not be installed, because command 'mdadm' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'pcsc' will not be installed, because command 'pcscd' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'tpm2-tss' will not be installed, because command 'tpm2' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'cifs' will not be installed, because command 'mount.cifs' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'iscsi' will not be installed, because command 'iscsiadm' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'iscsi' will not be installed, because command 'iscsid' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'nvmf' will not be installed, because command 'nvme' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: dracut module 'memstrack' will not be installed, because command 'memstrack' could not be found!
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: memstrack is not available
Jun 23 02:56:34 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: If you need to use rd.memdebug>=4, please install memstrack and procps-ng
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: systemd ***
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: fips ***
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: systemd-initrd ***
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: rngd ***
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: i18n ***
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: Cannot change IRQ 0 affinity: Operation not permitted
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: IRQ 0 affinity is now unmanaged
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: Cannot change IRQ 48 affinity: Operation not permitted
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: IRQ 48 affinity is now unmanaged
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: Cannot change IRQ 49 affinity: Operation not permitted
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: IRQ 49 affinity is now unmanaged
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: Cannot change IRQ 50 affinity: Operation not permitted
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: IRQ 50 affinity is now unmanaged
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: Cannot change IRQ 51 affinity: Operation not permitted
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: IRQ 51 affinity is now unmanaged
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: Cannot change IRQ 52 affinity: Operation not permitted
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: IRQ 52 affinity is now unmanaged
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: Cannot change IRQ 53 affinity: Operation not permitted
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: IRQ 53 affinity is now unmanaged
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: Cannot change IRQ 54 affinity: Operation not permitted
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: IRQ 54 affinity is now unmanaged
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: Cannot change IRQ 55 affinity: Operation not permitted
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: IRQ 55 affinity is now unmanaged
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: Cannot change IRQ 56 affinity: Operation not permitted
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: IRQ 56 affinity is now unmanaged
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: Cannot change IRQ 57 affinity: Operation not permitted
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: IRQ 57 affinity is now unmanaged
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: Cannot change IRQ 58 affinity: Operation not permitted
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: IRQ 58 affinity is now unmanaged
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: Cannot change IRQ 59 affinity: Operation not permitted
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com irqbalance[604]: IRQ 59 affinity is now unmanaged
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: drm ***
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: prefixdevname ***
Jun 23 02:56:35 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: kernel-modules ***
Jun 23 02:56:36 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: kernel-modules-extra ***
Jun 23 02:56:36 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: kernel-modules-extra: configuration source "/run/depmod.d" does not exist
Jun 23 02:56:36 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: kernel-modules-extra: configuration source "/lib/depmod.d" does not exist
Jun 23 02:56:36 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: kernel-modules-extra: parsing configuration file "/etc/depmod.d/dist.conf"
Jun 23 02:56:36 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: kernel-modules-extra: /etc/depmod.d/dist.conf: added "updates extra built-in weak-updates" to the list of search directories
Jun 23 02:56:36 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: fstab-sys ***
Jun 23 02:56:36 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: rootfs-block ***
Jun 23 02:56:36 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: terminfo ***
Jun 23 02:56:36 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: udev-rules ***
Jun 23 02:56:36 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: Skipping udev rule: 91-permissions.rules
Jun 23 02:56:36 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: Skipping udev rule: 80-drivers-modprobe.rules
Jun 23 02:56:36 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: dracut-systemd ***
Jun 23 02:56:36 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: usrmount ***
Jun 23 02:56:36 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: base ***
Jun 23 02:56:36 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: fs-lib ***
Jun 23 02:56:36 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: kdumpbase ***
Jun 23 02:56:36 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com chronyd[615]: Selected source 10.2.32.38
Jun 23 02:56:36 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com chronyd[615]: System clock TAI offset set to 37 seconds
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: microcode_ctl-fw_dir_override ***
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl module: mangling fw_dir
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl: reset fw_dir to "/lib/firmware/updates /lib/firmware"
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel"...
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl: intel: caveats check for kernel version "5.14.0-587.el9.x86_64" passed, adding "/usr/share/microcode_ctl/ucode_with_caveats/intel" to fw_dir variable
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-2d-07"...
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl: configuration "intel-06-2d-07" is ignored
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4e-03"...
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl: configuration "intel-06-4e-03" is ignored
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4f-01"...
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl: configuration "intel-06-4f-01" is ignored
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-55-04"...
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl: configuration "intel-06-55-04" is ignored
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-5e-03"...
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl: configuration "intel-06-5e-03" is ignored
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8c-01"...
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl: configuration "intel-06-8c-01" is ignored
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8e-9e-0x-0xca"...
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl: configuration "intel-06-8e-9e-0x-0xca" is ignored
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8e-9e-0x-dell"...
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl: configuration "intel-06-8e-9e-0x-dell" is ignored
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: microcode_ctl: final fw_dir: "/usr/share/microcode_ctl/ucode_with_caveats/intel /lib/firmware/updates /lib/firmware"
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: openssl ***
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: shutdown ***
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including module: squash ***
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Including modules done ***
Jun 23 02:56:37 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Installing kernel module dependencies ***
Jun 23 02:56:38 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Installing kernel module dependencies done ***
Jun 23 02:56:38 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Resolving executable dependencies ***
Jun 23 02:56:39 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Resolving executable dependencies done ***
Jun 23 02:56:39 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Hardlinking files ***
Jun 23 02:56:39 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: Mode: real
Jun 23 02:56:39 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: Files: 449
Jun 23 02:56:39 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: Linked: 2 files
Jun 23 02:56:39 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: Compared: 0 xattrs
Jun 23 02:56:39 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: Compared: 11 files
Jun 23 02:56:39 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: Saved: 56.21 KiB
Jun 23 02:56:39 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: Duration: 0.006941 seconds
Jun 23 02:56:39 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Hardlinking files done ***
Jun 23 02:56:39 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Generating early-microcode cpio image ***
Jun 23 02:56:39 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Constructing GenuineIntel.bin ***
Jun 23 02:56:39 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Constructing GenuineIntel.bin ***
Jun 23 02:56:39 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Store current command line parameters ***
Jun 23 02:56:39 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: Stored kernel commandline:
Jun 23 02:56:39 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: No dracut internal kernel commandline stored in the initramfs
Jun 23 02:56:39 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Install squash loader ***
Jun 23 02:56:39 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Squashing the files inside the initramfs ***
Jun 23 02:56:40 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Jun 23 02:56:46 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Squashing the files inside the initramfs done ***
Jun 23 02:56:46 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Creating image file '/boot/initramfs-5.14.0-587.el9.x86_64kdump.img' ***
Jun 23 02:56:46 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com dracut[1464]: *** Creating initramfs image file '/boot/initramfs-5.14.0-587.el9.x86_64kdump.img' done ***
Jun 23 02:56:47 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com kdumpctl[818]: kdump: kexec: loaded kdump kernel
Jun 23 02:56:47 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com kdumpctl[818]: kdump: Starting kdump: [OK]
Jun 23 02:56:47 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Finished Crash recovery kernel arming. ░░ Subject: A start job for unit kdump.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kdump.service has finished successfully. ░░ ░░ The job identifier is 204.
Jun 23 02:56:47 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Startup finished in 1.158s (kernel) + 3.271s (initrd) + 26.101s (userspace) = 30.531s. ░░ Subject: System start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ All system services necessary queued for starting at boot have been ░░ started. Note that this does not mean that the machine is now idle as services ░░ might still be busy with completing start-up. ░░ ░░ Kernel start-up required 1158512 microseconds. ░░ ░░ Initrd start-up required 3271674 microseconds. ░░ ░░ Userspace start-up required 26101377 microseconds.
Jun 23 02:57:00 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com sshd[4259]: Accepted publickey for root from 10.30.32.164 port 47258 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Created slice User Slice of UID 0. ░░ Subject: A start job for unit user-0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-0.slice has finished successfully. ░░ ░░ The job identifier is 554.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting User Runtime Directory /run/user/0... ░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 553.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd-logind[606]: New session 1 of user root. ░░ Subject: A new session 1 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 1 has been created for the user root. ░░ ░░ The leading process of the session is 4259.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Finished User Runtime Directory /run/user/0. ░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has finished successfully. ░░ ░░ The job identifier is 553.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting User Manager for UID 0... ░░ Subject: A start job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 488.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[4263]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0)
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[4263]: Queued start job for default target Main User Target.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[4263]: Created slice User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[4263]: Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system). ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[4263]: Started Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[4263]: Reached target Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[4263]: Reached target Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 7.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[4263]: Starting D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 11.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[4263]: Starting Create User's Volatile Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 3.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[4263]: Finished Create User's Volatile Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[4263]: Listening on D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[4263]: Reached target Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[4263]: Reached target Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[4263]: Reached target Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[4263]: Startup finished in 74ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 0 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 74505 microseconds.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started User Manager for UID 0. ░░ Subject: A start job for unit user@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has finished successfully. ░░ ░░ The job identifier is 488.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Session 1 of User root. ░░ Subject: A start job for unit session-1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-1.scope has finished successfully. ░░ ░░ The job identifier is 556.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com sshd[4259]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com sshd[4272]: Received disconnect from 10.30.32.164 port 47258:11: disconnected by user
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com sshd[4272]: Disconnected from user root 10.30.32.164 port 47258
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com sshd[4259]: pam_unix(sshd:session): session closed for user root
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: session-1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-1.scope has successfully entered the 'dead' state.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd-logind[606]: Session 1 logged out. Waiting for processes to exit.
Jun 23 02:58:21 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd-logind[606]: Removed session 1. ░░ Subject: Session 1 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 1 has been terminated.
Jun 23 02:58:23 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com sshd[4309]: Accepted publickey for root from 10.31.9.251 port 50784 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU
Jun 23 02:58:23 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com sshd[4308]: Accepted publickey for root from 10.31.9.251 port 50770 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU
Jun 23 02:58:23 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd-logind[606]: New session 3 of user root. ░░ Subject: A new session 3 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 3 has been created for the user root. ░░ ░░ The leading process of the session is 4309.
Jun 23 02:58:23 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Session 3 of User root. ░░ Subject: A start job for unit session-3.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-3.scope has finished successfully. ░░ ░░ The job identifier is 625.
Jun 23 02:58:23 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd-logind[606]: New session 4 of user root. ░░ Subject: A new session 4 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 4 has been created for the user root. ░░ ░░ The leading process of the session is 4308.
Jun 23 02:58:23 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Session 4 of User root. ░░ Subject: A start job for unit session-4.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-4.scope has finished successfully. ░░ ░░ The job identifier is 694.
Jun 23 02:58:23 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com sshd[4309]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Jun 23 02:58:23 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com sshd[4308]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Jun 23 02:58:23 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com sshd[4314]: Received disconnect from 10.31.9.251 port 50784:11: disconnected by user
Jun 23 02:58:23 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com sshd[4314]: Disconnected from user root 10.31.9.251 port 50784
Jun 23 02:58:23 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com sshd[4309]: pam_unix(sshd:session): session closed for user root
Jun 23 02:58:23 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: session-3.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-3.scope has successfully entered the 'dead' state.
Jun 23 02:58:23 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd-logind[606]: Session 3 logged out. Waiting for processes to exit.
Jun 23 02:58:23 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd-logind[606]: Removed session 3. ░░ Subject: Session 3 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 3 has been terminated.
Jun 23 02:58:41 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com unknown: Running test '/Prepare-managed-node/tests/prep_managed_node' (serial number 1) with reboot count 0 and test restart count 0. (Be aware the test name is sanitized!)
Jun 23 02:58:42 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Hostname Service... ░░ Subject: A start job for unit systemd-hostnamed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has begun execution. ░░ ░░ The job identifier is 764.
Jun 23 02:58:42 ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Hostname Service. ░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has finished successfully. ░░ ░░ The job identifier is 764.
Jun 23 02:58:42 managed-node3 systemd-hostnamed[6211]: Hostname set to <managed-node3> (static)
Jun 23 02:58:42 managed-node3 NetworkManager[642]: <info>  [1750661922.3704] hostname: static hostname changed from "ip-10-31-40-4.testing-farm.us-east-1.aws.redhat.com" to "managed-node3"
Jun 23 02:58:42 managed-node3 systemd[1]: Starting Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 830.
Jun 23 02:58:42 managed-node3 systemd[1]: Started Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 830.
Jun 23 02:58:43 managed-node3 unknown: Leaving test '/Prepare-managed-node/tests/prep_managed_node' (serial number 1). (Be aware the test name is sanitized!)
Jun 23 02:58:52 managed-node3 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Jun 23 02:59:12 managed-node3 systemd[1]: systemd-hostnamed.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state.
Jun 23 02:59:43 managed-node3 sshd[6910]: Accepted publickey for root from 10.31.14.123 port 56534 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE
Jun 23 02:59:43 managed-node3 systemd-logind[606]: New session 5 of user root.
░░ Subject: A new session 5 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░
░░ A new session with the ID 5 has been created for the user root.
░░
░░ The leading process of the session is 6910.
Jun 23 02:59:43 managed-node3 systemd[1]: Started Session 5 of User root.
░░ Subject: A start job for unit session-5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit session-5.scope has finished successfully.
░░
░░ The job identifier is 896.
Jun 23 02:59:43 managed-node3 sshd[6910]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Jun 23 02:59:45 managed-node3 python3.9[7087]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Jun 23 02:59:46 managed-node3 python3.9[7262]: ansible-service_facts Invoked
Jun 23 02:59:48 managed-node3 python3.9[7494]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jun 23 02:59:49 managed-node3 python3.9[7643]: ansible-ansible.legacy.dnf Invoked with name=['pcp', 'pcp-zeroconf'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Jun 23 03:00:17 managed-node3 kernel: SELinux: Converting 372 SID table entries...
Jun 23 03:00:17 managed-node3 kernel: SELinux: policy capability network_peer_controls=1
Jun 23 03:00:17 managed-node3 kernel: SELinux: policy capability open_perms=1
Jun 23 03:00:17 managed-node3 kernel: SELinux: policy capability extended_socket_class=1
Jun 23 03:00:17 managed-node3 kernel: SELinux: policy capability always_check_network=0
Jun 23 03:00:17 managed-node3 kernel: SELinux: policy capability cgroup_seclabel=1
Jun 23 03:00:17 managed-node3 kernel: SELinux: policy capability nnp_nosuid_transition=1
Jun 23 03:00:17 managed-node3 kernel: SELinux: policy capability genfs_seclabel_symlinks=1
Jun 23 03:00:19 managed-node3 dbus-broker-launch[600]: avc: op=load_policy lsm=selinux seqno=2 res=1
Jun 23 03:00:19 managed-node3 systemd[1]: Starting PCP Reboot Initialization Helper Service...
░░ Subject: A start job for unit pcp-reboot-init.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pcp-reboot-init.service has begun execution.
░░
░░ The job identifier is 965.
Jun 23 03:00:19 managed-node3 systemd[1]: Finished PCP Reboot Initialization Helper Service.
░░ Subject: A start job for unit pcp-reboot-init.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pcp-reboot-init.service has finished successfully.
░░
░░ The job identifier is 965.
Jun 23 03:00:19 managed-node3 systemd[1]: Reloading.
Jun 23 03:00:19 managed-node3 systemd-rc-local-generator[8180]: /etc/rc.d/rc.local is not marked executable, skipping.
Jun 23 03:00:19 managed-node3 systemd[1]: Starting Performance Metrics Collector Daemon...
░░ Subject: A start job for unit pmcd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmcd.service has begun execution.
░░
░░ The job identifier is 968.
Jun 23 03:00:19 managed-node3 systemd[1]: Started Performance Metrics Collector Daemon.
░░ Subject: A start job for unit pmcd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmcd.service has finished successfully.
░░
░░ The job identifier is 968.
Jun 23 03:00:19 managed-node3 systemd[1]: Starting Performance Metrics Inference Engine...
░░ Subject: A start job for unit pmie.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie.service has begun execution.
░░
░░ The job identifier is 1103.
Jun 23 03:00:19 managed-node3 systemd[1]: Starting Performance Metrics Archive Logger...
░░ Subject: A start job for unit pmlogger.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmlogger.service has begun execution.
░░
░░ The job identifier is 1033.
Jun 23 03:00:20 managed-node3 pmcd[8601]: Installing dm PMDA ...
Jun 23 03:00:20 managed-node3 rc[8454]: /etc/pcp/pmie/rc: Warning: Performance Co-Pilot Inference Engine (pmie) not permanently enabled.
Jun 23 03:00:20 managed-node3 rc[8457]: /etc/pcp/pmlogger/rc: Warning: Performance Co-Pilot archive logger(s) not permanently enabled.
Jun 23 03:00:20 managed-node3 rc[8454]: To enable pmie, run the following as root:
Jun 23 03:00:20 managed-node3 rc[8454]: # /usr/bin/systemctl enable pmie.service
Jun 23 03:00:20 managed-node3 rc[8457]: To enable pmlogger, run the following as root:
Jun 23 03:00:20 managed-node3 rc[8457]: # /usr/bin/systemctl enable pmlogger.service
Jun 23 03:00:20 managed-node3 systemd[1]: Started Performance Metrics Inference Engine.
░░ Subject: A start job for unit pmie.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie.service has finished successfully.
░░
░░ The job identifier is 1103.
Jun 23 03:00:20 managed-node3 systemd[1]: Started Half-hourly check of PMIE instances.
░░ Subject: A start job for unit pmie_check.timer has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie_check.timer has finished successfully.
░░
░░ The job identifier is 1170.
Jun 23 03:00:20 managed-node3 systemd[1]: Started Daily processing of PMIE logs.
░░ Subject: A start job for unit pmie_daily.timer has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie_daily.timer has finished successfully.
░░
░░ The job identifier is 1171.
Jun 23 03:00:20 managed-node3 systemd[1]: Starting pmie farm service...
░░ Subject: A start job for unit pmie_farm.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie_farm.service has begun execution.
░░
░░ The job identifier is 1168.
Jun 23 03:00:20 managed-node3 systemd[1]: Starting Check PMIE instances are running...
░░ Subject: A start job for unit pmie_check.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie_check.service has begun execution.
░░
░░ The job identifier is 1173.
Jun 23 03:00:20 managed-node3 systemd[1]: Started pmie farm service.
░░ Subject: A start job for unit pmie_farm.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie_farm.service has finished successfully.
░░
░░ The job identifier is 1168.
Jun 23 03:00:20 managed-node3 systemd[1]: Started Half-hourly check of pmie farm instances.
░░ Subject: A start job for unit pmie_farm_check.timer has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie_farm_check.timer has finished successfully.
░░
░░ The job identifier is 1169.
Jun 23 03:00:20 managed-node3 systemd[1]: Starting Check and migrate non-primary pmie farm instances...
░░ Subject: A start job for unit pmie_farm_check.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie_farm_check.service has begun execution.
░░
░░ The job identifier is 1238.
Jun 23 03:00:20 managed-node3 systemd[1]: Started Check PMIE instances are running.
░░ Subject: A start job for unit pmie_check.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie_check.service has finished successfully.
░░
░░ The job identifier is 1173.
Jun 23 03:00:20 managed-node3 systemd[1]: Started Check and migrate non-primary pmie farm instances.
░░ Subject: A start job for unit pmie_farm_check.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie_farm_check.service has finished successfully.
░░
░░ The job identifier is 1238.
Jun 23 03:00:20 managed-node3 systemd[1]: pmie_farm_check.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmie_farm_check.service has successfully entered the 'dead' state.
Jun 23 03:00:20 managed-node3 systemd[1]: pmie_check.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmie_check.service has successfully entered the 'dead' state.
Jun 23 03:00:21 managed-node3 systemd[1]: Started Performance Metrics Archive Logger.
░░ Subject: A start job for unit pmlogger.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmlogger.service has finished successfully.
░░
░░ The job identifier is 1033.
Jun 23 03:00:21 managed-node3 systemd[1]: Started Half-hourly check of pmlogger instances.
░░ Subject: A start job for unit pmlogger_check.timer has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmlogger_check.timer has finished successfully.
░░
░░ The job identifier is 1101.
Jun 23 03:00:21 managed-node3 systemd[1]: Started Daily processing of archives.
░░ Subject: A start job for unit pmlogger_daily.timer has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmlogger_daily.timer has finished successfully.
░░
░░ The job identifier is 1100.
Jun 23 03:00:21 managed-node3 systemd[1]: Starting pmlogger farm service...
░░ Subject: A start job for unit pmlogger_farm.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmlogger_farm.service has begun execution.
░░
░░ The job identifier is 1034.
Jun 23 03:00:21 managed-node3 systemd[1]: Reloading.
Jun 23 03:00:21 managed-node3 systemd-rc-local-generator[9452]: /etc/rc.d/rc.local is not marked executable, skipping.
Jun 23 03:00:21 managed-node3 systemd[1]: Started pmlogger farm service.
░░ Subject: A start job for unit pmlogger_farm.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmlogger_farm.service has finished successfully.
░░
░░ The job identifier is 1034.
Jun 23 03:00:21 managed-node3 systemd[1]: Started Half-hourly check of pmlogger farm instances.
░░ Subject: A start job for unit pmlogger_farm_check.timer has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmlogger_farm_check.timer has finished successfully.
░░
░░ The job identifier is 1099.
Jun 23 03:00:21 managed-node3 systemd[1]: Starting Check pmlogger instances are running...
░░ Subject: A start job for unit pmlogger_check.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmlogger_check.service has begun execution.
░░
░░ The job identifier is 1303.
Jun 23 03:00:21 managed-node3 systemd[1]: Started Check pmlogger instances are running.
░░ Subject: A start job for unit pmlogger_check.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmlogger_check.service has finished successfully.
░░
░░ The job identifier is 1303.
Jun 23 03:00:21 managed-node3 systemd[1]: Starting Check and migrate non-primary pmlogger farm instances...
░░ Subject: A start job for unit pmlogger_farm_check.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmlogger_farm_check.service has begun execution.
░░
░░ The job identifier is 1368.
Jun 23 03:00:21 managed-node3 systemd[1]: Started Check and migrate non-primary pmlogger farm instances.
░░ Subject: A start job for unit pmlogger_farm_check.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmlogger_farm_check.service has finished successfully.
░░
░░ The job identifier is 1368.
Jun 23 03:00:22 managed-node3 systemd[1]: pmlogger_farm_check.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmlogger_farm_check.service has successfully entered the 'dead' state.
Jun 23 03:00:22 managed-node3 systemd[1]: pmlogger_check.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmlogger_check.service has successfully entered the 'dead' state.
Jun 23 03:00:22 managed-node3 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.
░░ Subject: A start job for unit run-rc576b5a7e75042cbb98837cdc66eacff.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit run-rc576b5a7e75042cbb98837cdc66eacff.service has finished successfully.
░░
░░ The job identifier is 1433.
Jun 23 03:00:22 managed-node3 systemd[1]: Starting man-db-cache-update.service...
░░ Subject: A start job for unit man-db-cache-update.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit man-db-cache-update.service has begun execution.
░░
░░ The job identifier is 1498.
Jun 23 03:00:22 managed-node3 systemd[1]: Reloading.
Jun 23 03:00:22 managed-node3 systemd-rc-local-generator[10481]: /etc/rc.d/rc.local is not marked executable, skipping.
Jun 23 03:00:23 managed-node3 systemd[1]: Queuing reload/restart jobs for marked units…
Jun 23 03:00:23 managed-node3 systemd[1]: Stopping pmie farm service...
░░ Subject: A stop job for unit pmie_farm.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmie_farm.service has begun execution.
░░
░░ The job identifier is 1698.
Jun 23 03:00:23 managed-node3 systemd[1]: Stopping pmlogger farm service...
░░ Subject: A stop job for unit pmlogger_farm.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmlogger_farm.service has begun execution.
░░
░░ The job identifier is 1770.
Jun 23 03:00:23 managed-node3 systemd[1]: pmie_farm.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmie_farm.service has successfully entered the 'dead' state.
Jun 23 03:00:23 managed-node3 systemd[1]: Stopped pmie farm service.
░░ Subject: A stop job for unit pmie_farm.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmie_farm.service has finished.
░░
░░ The job identifier is 1698 and the job result is done.
Jun 23 03:00:23 managed-node3 systemd[1]: pmlogger_farm.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmlogger_farm.service has successfully entered the 'dead' state.
Jun 23 03:00:23 managed-node3 systemd[1]: Stopped pmlogger farm service.
░░ Subject: A stop job for unit pmlogger_farm.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmlogger_farm.service has finished.
░░
░░ The job identifier is 1770 and the job result is done.
Jun 23 03:00:23 managed-node3 systemd[1]: Stopping Performance Metrics Inference Engine...
░░ Subject: A stop job for unit pmie.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmie.service has begun execution.
░░
░░ The job identifier is 1628.
Jun 23 03:00:23 managed-node3 systemd[1]: Stopping Performance Metrics Archive Logger...
░░ Subject: A stop job for unit pmlogger.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmlogger.service has begun execution.
░░
░░ The job identifier is 1700.
Jun 23 03:00:23 managed-node3 systemd[1]: pmlogger.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmlogger.service has successfully entered the 'dead' state.
Jun 23 03:00:23 managed-node3 systemd[1]: Stopped Performance Metrics Archive Logger.
░░ Subject: A stop job for unit pmlogger.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmlogger.service has finished.
░░
░░ The job identifier is 1700 and the job result is done.
Jun 23 03:00:23 managed-node3 systemd[1]: pmlogger.service: Consumed 1.408s CPU time.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmlogger.service completed and consumed the indicated resources.
Jun 23 03:00:23 managed-node3 systemd[1]: pmie.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmie.service has successfully entered the 'dead' state.
Jun 23 03:00:23 managed-node3 systemd[1]: Stopped Performance Metrics Inference Engine.
░░ Subject: A stop job for unit pmie.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmie.service has finished.
░░
░░ The job identifier is 1628 and the job result is done.
Jun 23 03:00:23 managed-node3 systemd[1]: Stopping Performance Metrics Collector Daemon...
░░ Subject: A stop job for unit pmcd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmcd.service has begun execution.
░░
░░ The job identifier is 1563.
Jun 23 03:00:24 managed-node3 pmcd[8602]: Terminated
Jun 23 03:00:24 managed-node3 pmcd[8478]: _pmda_setup: Interrupted!
Jun 23 03:00:24 managed-node3 pmcd[8478]: _pmda_setup_cleanup: reset .NeedInstall for dm PMDA
Jun 23 03:00:24 managed-node3 systemd[1]: pmcd.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmcd.service has successfully entered the 'dead' state.
Jun 23 03:00:24 managed-node3 systemd[1]: Stopped Performance Metrics Collector Daemon.
░░ Subject: A stop job for unit pmcd.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmcd.service has finished.
░░
░░ The job identifier is 1563 and the job result is done.
Jun 23 03:00:24 managed-node3 systemd[1]: pmcd.service: Consumed 1.790s CPU time.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmcd.service completed and consumed the indicated resources.
Jun 23 03:00:24 managed-node3 systemd[1]: Starting Performance Metrics Collector Daemon...
░░ Subject: A start job for unit pmcd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmcd.service has begun execution.
░░
░░ The job identifier is 1563.
Jun 23 03:00:24 managed-node3 systemd[1]: Started Performance Metrics Collector Daemon.
░░ Subject: A start job for unit pmcd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmcd.service has finished successfully.
░░
░░ The job identifier is 1563.
Jun 23 03:00:24 managed-node3 systemd[1]: Starting Performance Metrics Inference Engine...
░░ Subject: A start job for unit pmie.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie.service has begun execution.
░░
░░ The job identifier is 1628.
Jun 23 03:00:24 managed-node3 systemd[1]: Starting Performance Metrics Archive Logger...
░░ Subject: A start job for unit pmlogger.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmlogger.service has begun execution.
░░
░░ The job identifier is 1700.
Jun 23 03:00:24 managed-node3 pmcd[12923]: Installing dm PMDA ...
Jun 23 03:00:25 managed-node3 systemd[1]: Started Performance Metrics Inference Engine.
░░ Subject: A start job for unit pmie.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie.service has finished successfully.
░░
░░ The job identifier is 1628.
Jun 23 03:00:25 managed-node3 systemd[1]: Starting pmie farm service...
░░ Subject: A start job for unit pmie_farm.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie_farm.service has begun execution.
░░
░░ The job identifier is 1698.
Jun 23 03:00:25 managed-node3 systemd[1]: Started pmie farm service.
░░ Subject: A start job for unit pmie_farm.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie_farm.service has finished successfully.
░░
░░ The job identifier is 1698.
Jun 23 03:00:25 managed-node3 systemd[1]: Started Performance Metrics Archive Logger.
░░ Subject: A start job for unit pmlogger.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmlogger.service has finished successfully.
░░
░░ The job identifier is 1700.
Jun 23 03:00:25 managed-node3 systemd[1]: Starting pmlogger farm service...
░░ Subject: A start job for unit pmlogger_farm.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmlogger_farm.service has begun execution.
░░
░░ The job identifier is 1770.
Jun 23 03:00:25 managed-node3 systemd[1]: Started pmlogger farm service.
░░ Subject: A start job for unit pmlogger_farm.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmlogger_farm.service has finished successfully.
░░
░░ The job identifier is 1770.
Jun 23 03:00:26 managed-node3 sh[10466]: /usr/bin/mandb: warning: whatis for ASIdentifierChoice_new.3ossl.gz exceeds 8192 bytes, truncating.
Jun 23 03:00:27 managed-node3 sh[10466]: /usr/bin/mandb: warning: whatis for d2i_X509.3ossl.gz exceeds 8192 bytes, truncating.
Jun 23 03:00:27 managed-node3 python3.9[16065]: ansible-ansible.legacy.dnf Invoked with name=['cyrus-sasl-lib', 'cyrus-sasl-scram'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Jun 23 03:00:28 managed-node3 systemd[1]: man-db-cache-update.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit man-db-cache-update.service has successfully entered the 'dead' state.
Jun 23 03:00:28 managed-node3 systemd[1]: Finished man-db-cache-update.service.
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit man-db-cache-update.service has finished successfully.
░░
░░ The job identifier is 1498.
Jun 23 03:00:28 managed-node3 systemd[1]: man-db-cache-update.service: Consumed 3.407s CPU time.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit man-db-cache-update.service completed and consumed the indicated resources.
Jun 23 03:00:28 managed-node3 systemd[1]: run-rc576b5a7e75042cbb98837cdc66eacff.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-rc576b5a7e75042cbb98837cdc66eacff.service has successfully entered the 'dead' state.
Jun 23 03:00:29 managed-node3 python3.9[16978]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/pcp/pmcd/pmcd.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jun 23 03:00:30 managed-node3 python3.9[17131]: ansible-file Invoked with path=/etc/pcp/labels state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:30 managed-node3 python3.9[17280]: ansible-file Invoked with path=/etc/pcp/labels/optional state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:31 managed-node3 python3.9[17429]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jun 23 03:00:31 managed-node3 python3.9[17549]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750662030.9876294-8387-37468696850279/.source dest=/etc/pcp/labels/ansible-managed mode=0644 follow=False _original_basename=pmcd.explicit.labels.j2 checksum=5f36b2ea290645ee34d943220a14b54ee5ea5be5 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:32 managed-node3 python3.9[17698]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/optional/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jun 23 03:00:32 managed-node3 python3.9[17818]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750662031.883908-8420-55499562385575/.source dest=/etc/pcp/labels/optional/ansible-managed mode=0644 follow=False _original_basename=pmcd.implicit.labels.j2 checksum=5f36b2ea290645ee34d943220a14b54ee5ea5be5 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:32 managed-node3 python3.9[17967]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmcd follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jun 23 03:00:33 managed-node3 python3.9[18089]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750662032.5890114-8442-208934037361830/.source dest=/etc/sysconfig/pmcd mode=0644 follow=False _original_basename=pmcd.defaults.j2 checksum=7518789c091387cd9c322e1a8fa8aad21d4efbd3 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:33 managed-node3 python3.9[18238]: ansible-user Invoked with name=metrics system=True state=present non_unique=False force=False remove=False create_home=True move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None
Jun 23 03:00:33 managed-node3 useradd[18240]: new group: name=metrics, GID=995
Jun 23 03:00:33 managed-node3 useradd[18240]: new user: name=metrics, UID=995, GID=995, home=/home/metrics, shell=/bin/bash, from=/dev/pts/0
Jun 23 03:00:34 managed-node3 python3.9[18395]: ansible-ansible.legacy.command Invoked with _raw_params=set -eu
if set -o | grep -q pipefail; then
  set -o pipefail # pipefail not supported on debian, some ubuntu
fi
if ! sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^metrics@"; then
  echo "Creating new metrics user in /etc/pcp/passwd.db"
  echo "metrics" | saslpasswd2 -a pmcd "metrics"
  chown root:pcp "/etc/pcp/passwd.db"
  chmod 640 "/etc/pcp/passwd.db"
fi
_uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jun 23 03:00:34 managed-node3 sasldblistusers2[18399]: SASL error opening password file. Have you performed the migration from db2 using cyrusbdb2current?
Jun 23 03:00:34 managed-node3 sasldblistusers2[18399]: _sasldb_getkeyhandle has failed
Jun 23 03:00:34 managed-node3 python3.9[18553]: ansible-ansible.legacy.stat Invoked with path=/etc/sasl2/pmcd.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jun 23 03:00:34 managed-node3 python3.9[18677]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750662034.3580801-8510-128707348603513/.source.conf dest=/etc/sasl2/pmcd.conf mode=0644 follow=False _original_basename=pmcd.sasl2.conf.j2 checksum=615d2de55ab86108da0c7e6b64988fecb4169771 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:35 managed-node3 python3.9[18827]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Jun 23 03:00:35 managed-node3 systemd[1]: Stopping Performance Metrics Collector Daemon...
░░ Subject: A stop job for unit pmcd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmcd.service has begun execution.
░░
░░ The job identifier is 1771.
Jun 23 03:00:36 managed-node3 pmcd[12926]: Terminated
Jun 23 03:00:36 managed-node3 pmcd[12734]: _pmda_setup: Interrupted!
Jun 23 03:00:36 managed-node3 pmcd[12734]: _pmda_setup_cleanup: reset .NeedInstall for dm PMDA
Jun 23 03:00:36 managed-node3 systemd[1]: pmcd.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmcd.service has successfully entered the 'dead' state.
Jun 23 03:00:36 managed-node3 systemd[1]: Stopped Performance Metrics Collector Daemon.
░░ Subject: A stop job for unit pmcd.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmcd.service has finished.
░░
░░ The job identifier is 1771 and the job result is done.
Jun 23 03:00:36 managed-node3 systemd[1]: pmcd.service: Consumed 1.272s CPU time.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmcd.service completed and consumed the indicated resources.
Jun 23 03:00:36 managed-node3 systemd[1]: Starting Performance Metrics Collector Daemon...
░░ Subject: A start job for unit pmcd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmcd.service has begun execution.
░░
░░ The job identifier is 1771.
Jun 23 03:00:36 managed-node3 systemd[1]: Started Performance Metrics Collector Daemon.
░░ Subject: A start job for unit pmcd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmcd.service has finished successfully.
░░
░░ The job identifier is 1771.
Jun 23 03:00:36 managed-node3 pmcd[19273]: Installing dm PMDA ...
Jun 23 03:00:37 managed-node3 python3.9[19440]: ansible-file Invoked with path=/etc/pcp/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:37 managed-node3 python3.9[19589]: ansible-file Invoked with path=/etc/pcp/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:37 managed-node3 python3.9[19738]: ansible-file Invoked with path=/etc/pcp/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:38 managed-node3 python3.9[19887]: ansible-file Invoked with path=/etc/pcp/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:38 managed-node3 python3.9[20036]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:38 managed-node3 python3.9[20185]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:39 managed-node3 python3.9[20334]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:39 managed-node3 python3.9[20483]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:40 managed-node3 python3.9[20632]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcplistenoverflows follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jun 23 03:00:40 managed-node3 python3.9[20754]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750662039.8079069-8735-264627136271424/.source dest=/etc/pcp/pmieconf/network/tcplistenoverflows owner=root group=root mode=0644 _original_basename=tcplistenoverflows follow=False checksum=608d8a6ac6ee33bb86b77d28ba24fbcd378db43d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:40 managed-node3 python3.9[20903]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldocookies follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jun 23 03:00:41 managed-node3 python3.9[21025]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750662040.5014353-8735-22868928356233/.source dest=/etc/pcp/pmieconf/network/tcpqfulldocookies owner=root group=root mode=0644 _original_basename=tcpqfulldocookies follow=False checksum=3256a5c2e8d07a20d8e97a08c0ab163252b0beae backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:41 managed-node3 python3.9[21174]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldrops follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jun 23 03:00:41 managed-node3 python3.9[21296]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750662041.211661-8735-207638091027671/.source dest=/etc/pcp/pmieconf/network/tcpqfulldrops owner=root group=root mode=0644 _original_basename=tcpqfulldrops follow=False checksum=37b2bd7f2430bd9678ab078c5e69a53bea556524 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:42 managed-node3 python3.9[21448]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/power/thermal_throttle follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jun 23 03:00:42 managed-node3 python3.9[21570]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750662041.900119-8735-92793942068771/.source dest=/etc/pcp/pmieconf/power/thermal_throttle owner=root group=root mode=0644 _original_basename=thermal_throttle follow=False checksum=1d53d6182709617c8f633339652d8d9e75f3b603 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:42 managed-node3 python3.9[21719]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/zeroconf/all_threads follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jun 23 03:00:43 managed-node3 python3.9[21841]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750662042.5922222-8735-162984466757628/.source dest=/etc/pcp/pmieconf/zeroconf/all_threads owner=root group=root mode=0644 _original_basename=all_threads follow=False checksum=65169db16dcaa224c211373001adc3addf1031c4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:43 managed-node3 python3.9[21990]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/filesys/vfs_files follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jun 23 03:00:43 managed-node3 python3.9[22065]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/filesys/vfs_files _original_basename=vfs_files recurse=False state=file path=/etc/pcp/pmieconf/filesys/vfs_files force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:44 managed-node3 python3.9[22214]: ansible-lineinfile Invoked with state=absent path=/var/lib/pcp/config/pmie/config.default regexp=//.*global webhook_endpoint = "" backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None line=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:44 managed-node3 python3.9[22363]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcplistenoverflows dest=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:45 managed-node3 python3.9[22512]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldocookies dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:45 managed-node3 python3.9[22661]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldrops dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:45 managed-node3 python3.9[22810]: ansible-file Invoked with src=/etc/pcp/pmieconf/power/thermal_throttle dest=/var/lib/pcp/config/pmieconf/power/thermal_throttle state=link force=True path=/var/lib/pcp/config/pmieconf/power/thermal_throttle recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:46 managed-node3 python3.9[22959]: ansible-file Invoked with src=/etc/pcp/pmieconf/zeroconf/all_threads dest=/var/lib/pcp/config/pmieconf/zeroconf/all_threads state=link force=True path=/var/lib/pcp/config/pmieconf/zeroconf/all_threads recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:46 managed-node3 python3.9[23108]: ansible-file Invoked with src=/etc/pcp/pmieconf/filesys/vfs_files dest=/var/lib/pcp/config/pmieconf/filesys/vfs_files state=link force=True path=/var/lib/pcp/config/pmieconf/filesys/vfs_files recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:47 managed-node3 python3.9[23260]: ansible-ansible.legacy.systemd Invoked with name=pmie state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Jun 23 03:00:47 managed-node3 systemd[1]: Stopping pmie farm service...
░░ Subject: A stop job for unit pmie_farm.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmie_farm.service has begun execution.
░░
░░ The job identifier is 1906.
Jun 23 03:00:47 managed-node3 systemd[1]: pmie_farm.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmie_farm.service has successfully entered the 'dead' state.
Jun 23 03:00:47 managed-node3 systemd[1]: Stopped pmie farm service.
░░ Subject: A stop job for unit pmie_farm.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmie_farm.service has finished.
░░
░░ The job identifier is 1906 and the job result is done.
Jun 23 03:00:47 managed-node3 systemd[1]: Stopping Performance Metrics Inference Engine...
░░ Subject: A stop job for unit pmie.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmie.service has begun execution.
░░
░░ The job identifier is 1836.
Jun 23 03:00:47 managed-node3 systemd[1]: pmie.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmie.service has successfully entered the 'dead' state.
Jun 23 03:00:47 managed-node3 systemd[1]: Stopped Performance Metrics Inference Engine.
░░ Subject: A stop job for unit pmie.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmie.service has finished.
░░
░░ The job identifier is 1836 and the job result is done.
Jun 23 03:00:47 managed-node3 systemd[1]: Starting Performance Metrics Inference Engine...
░░ Subject: A start job for unit pmie.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie.service has begun execution.
░░
░░ The job identifier is 1836.
Jun 23 03:00:47 managed-node3 systemd[1]: Started Performance Metrics Inference Engine.
░░ Subject: A start job for unit pmie.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie.service has finished successfully.
░░
░░ The job identifier is 1836.
Jun 23 03:00:47 managed-node3 systemd[1]: Starting pmie farm service...
░░ Subject: A start job for unit pmie_farm.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie_farm.service has begun execution.
░░
░░ The job identifier is 1906.
Jun 23 03:00:47 managed-node3 systemd[1]: Started pmie farm service.
░░ Subject: A start job for unit pmie_farm.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie_farm.service has finished successfully.
░░
░░ The job identifier is 1906.
Jun 23 03:00:48 managed-node3 python3.9[23936]: ansible-lineinfile Invoked with path=/etc/pcp.conf regexp=^PCP_ARCHIVE_DIR= line=PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:48 managed-node3 python3.9[24085]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jun 23 03:00:49 managed-node3 python3.9[24207]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750662048.4415574-9110-222567035273705/.source dest=/etc/sysconfig/pmlogger mode=0644 follow=False _original_basename=pmlogger.defaults.j2 checksum=67bc35973101c614e92b1990f8bebfffc39fe498 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:49 managed-node3 python3.9[24356]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger_timers follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jun 23 03:00:49 managed-node3 python3.9[24478]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750662049.185823-9140-101097722366323/.source dest=/etc/sysconfig/pmlogger_timers mode=0644 follow=False _original_basename=pmlogger.timers.j2 checksum=df7bd3b5b6f1de3af164aab81441c7251a13a298 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:00:50 managed-node3 python3.9[24627]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Jun 23 03:00:50 managed-node3 systemd[1]: Stopping pmlogger farm service...
░░ Subject: A stop job for unit pmlogger_farm.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmlogger_farm.service has begun execution.
░░
░░ The job identifier is 1977.
Jun 23 03:00:50 managed-node3 systemd[1]: pmlogger_farm.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmlogger_farm.service has successfully entered the 'dead' state.
Jun 23 03:00:50 managed-node3 systemd[1]: Stopped pmlogger farm service.
░░ Subject: A stop job for unit pmlogger_farm.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmlogger_farm.service has finished.
░░
░░ The job identifier is 1977 and the job result is done.
Jun 23 03:00:50 managed-node3 systemd[1]: Stopping Performance Metrics Archive Logger...
░░ Subject: A stop job for unit pmlogger.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmlogger.service has begun execution.
░░
░░ The job identifier is 1907.
Jun 23 03:00:50 managed-node3 systemd[1]: pmlogger.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmlogger.service has successfully entered the 'dead' state.
Jun 23 03:00:50 managed-node3 systemd[1]: Stopped Performance Metrics Archive Logger.
░░ Subject: A stop job for unit pmlogger.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmlogger.service has finished.
░░
░░ The job identifier is 1907 and the job result is done.
Jun 23 03:00:50 managed-node3 systemd[1]: pmlogger.service: Consumed 1.525s CPU time.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmlogger.service completed and consumed the indicated resources.
Jun 23 03:00:50 managed-node3 systemd[1]: Starting Performance Metrics Archive Logger...
░░ Subject: A start job for unit pmlogger.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmlogger.service has begun execution.
░░
░░ The job identifier is 1907.
Jun 23 03:00:51 managed-node3 systemd[1]: Started Performance Metrics Archive Logger.
░░ Subject: A start job for unit pmlogger.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmlogger.service has finished successfully.
░░
░░ The job identifier is 1907.
Jun 23 03:00:51 managed-node3 systemd[1]: Starting pmlogger farm service...
░░ Subject: A start job for unit pmlogger_farm.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmlogger_farm.service has begun execution.
░░
░░ The job identifier is 1977.
Jun 23 03:00:51 managed-node3 systemd[1]: Started pmlogger farm service.
░░ Subject: A start job for unit pmlogger_farm.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmlogger_farm.service has finished successfully.
░░
░░ The job identifier is 1977.
Jun 23 03:00:52 managed-node3 python3.9[25681]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Jun 23 03:00:52 managed-node3 systemd[1]: Stopping pmlogger farm service...
░░ Subject: A stop job for unit pmlogger_farm.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmlogger_farm.service has begun execution.
░░
░░ The job identifier is 2048.
Jun 23 03:00:52 managed-node3 systemd[1]: pmlogger_farm.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmlogger_farm.service has successfully entered the 'dead' state.
Jun 23 03:00:52 managed-node3 systemd[1]: Stopped pmlogger farm service.
░░ Subject: A stop job for unit pmlogger_farm.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmlogger_farm.service has finished.
░░
░░ The job identifier is 2048 and the job result is done.
Jun 23 03:00:52 managed-node3 systemd[1]: Stopping Performance Metrics Archive Logger...
░░ Subject: A stop job for unit pmlogger.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmlogger.service has begun execution.
░░ ░░ The job identifier is 1978. Jun 23 03:00:52 managed-node3 systemd[1]: pmlogger.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service has successfully entered the 'dead' state. Jun 23 03:00:52 managed-node3 systemd[1]: Stopped Performance Metrics Archive Logger. ░░ Subject: A stop job for unit pmlogger.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has finished. ░░ ░░ The job identifier is 1978 and the job result is done. Jun 23 03:00:52 managed-node3 systemd[1]: pmlogger.service: Consumed 1.280s CPU time. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service completed and consumed the indicated resources. Jun 23 03:00:52 managed-node3 systemd[1]: Starting Performance Metrics Archive Logger... ░░ Subject: A start job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 1978. Jun 23 03:00:53 managed-node3 systemd[1]: Started Performance Metrics Archive Logger. ░░ Subject: A start job for unit pmlogger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has finished successfully. ░░ ░░ The job identifier is 1978. Jun 23 03:00:53 managed-node3 systemd[1]: Starting pmlogger farm service... ░░ Subject: A start job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 2048. Jun 23 03:00:53 managed-node3 systemd[1]: Started pmlogger farm service. ░░ Subject: A start job for unit pmlogger_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has finished successfully. ░░ ░░ The job identifier is 2048. Jun 23 03:00:54 managed-node3 python3.9[26391]: ansible-service_facts Invoked Jun 23 03:00:57 managed-node3 python3.9[27021]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jun 23 03:00:58 managed-node3 python3.9[27196]: ansible-service_facts Invoked Jun 23 03:01:01 managed-node3 CROND[27306]: (root) CMD (run-parts /etc/cron.hourly) Jun 23 03:01:01 managed-node3 run-parts[27309]: (/etc/cron.hourly) starting 0anacron Jun 23 03:01:01 managed-node3 anacron[27317]: Anacron started on 2025-06-23 Jun 23 03:01:01 managed-node3 anacron[27317]: Will run job `cron.daily' in 42 min. Jun 23 03:01:01 managed-node3 anacron[27317]: Will run job `cron.weekly' in 62 min. Jun 23 03:01:01 managed-node3 anacron[27317]: Will run job `cron.monthly' in 82 min. 
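The entries above show the role pinning PCP_ARCHIVE_DIR in /etc/pcp.conf, deploying /etc/sysconfig/pmlogger and /etc/sysconfig/pmlogger_timers from templates, and restarting pmlogger twice; pmlogger_farm stops and starts around each restart, consistent with a unit dependency (PartOf= or BindsTo=) on pmlogger.service. A minimal standalone sketch of that sequence follows; the template names are taken from the log, while their contents are role-internal and assumed here.

- name: Configure and restart pmlogger (sketch of the logged sequence)
  hosts: managed-node3
  become: true
  tasks:
    - name: Pin the archive directory in /etc/pcp.conf
      ansible.builtin.lineinfile:
        path: /etc/pcp.conf
        regexp: '^PCP_ARCHIVE_DIR='
        line: PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger

    - name: Deploy the pmlogger sysconfig files
      ansible.builtin.template:
        src: "{{ item.src }}"   # role-internal templates; names from the log
        dest: "{{ item.dest }}"
        mode: "0644"
      loop:
        - { src: pmlogger.defaults.j2, dest: /etc/sysconfig/pmlogger }
        - { src: pmlogger.timers.j2, dest: /etc/sysconfig/pmlogger_timers }

    - name: Restart and enable pmlogger (the farm service follows via its unit dependency)
      ansible.builtin.systemd:
        name: pmlogger
        state: restarted
        enabled: true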
Jun 23 03:01:01 managed-node3 anacron[27317]: Jobs will be executed sequentially Jun 23 03:01:01 managed-node3 run-parts[27319]: (/etc/cron.hourly) finished 0anacron Jun 23 03:01:01 managed-node3 CROND[27305]: (root) CMDEND (run-parts /etc/cron.hourly) Jun 23 03:01:01 managed-node3 python3.9[27447]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jun 23 03:01:02 managed-node3 python3.9[27596]: ansible-ansible.legacy.dnf Invoked with name=['pcp', 'pcp-zeroconf'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jun 23 03:01:03 managed-node3 python3.9[27746]: ansible-ansible.legacy.dnf Invoked with name=['cyrus-sasl-lib', 'cyrus-sasl-scram'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jun 23 03:01:05 managed-node3 python3.9[27896]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/pcp/pmcd/pmcd.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 23 03:01:05 managed-node3 python3.9[28046]: ansible-file Invoked with path=/etc/pcp/labels state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:06 managed-node3 python3.9[28195]: ansible-file Invoked with path=/etc/pcp/labels/optional state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:06 managed-node3 python3.9[28344]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:01:06 managed-node3 python3.9[28419]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/ansible-managed _original_basename=pmcd.explicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None 
group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:07 managed-node3 python3.9[28571]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/optional/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:01:07 managed-node3 python3.9[28646]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/optional/ansible-managed _original_basename=pmcd.implicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/optional/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:07 managed-node3 python3.9[28795]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmcd follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:01:08 managed-node3 python3.9[28870]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmcd _original_basename=pmcd.defaults.j2 recurse=False state=file path=/etc/sysconfig/pmcd force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:08 managed-node3 python3.9[29019]: ansible-user Invoked with name=pcptest system=True state=present non_unique=False force=False remove=False create_home=True move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jun 23 03:01:08 managed-node3 useradd[29021]: new group: name=pcptest, GID=994 Jun 23 03:01:08 managed-node3 useradd[29021]: new user: name=pcptest, UID=994, GID=994, home=/home/pcptest, shell=/bin/bash, from=/dev/pts/0 Jun 23 03:01:09 managed-node3 python3.9[29176]: ansible-ansible.legacy.command Invoked with _raw_params=set -eu if set -o | grep -q pipefail; then set -o pipefail # pipefail not supported on debian, some ubuntu fi if ! 
sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^pcptest@"; then echo "Creating new pcptest user in /etc/pcp/passwd.db" echo "t;dlen;dle" | saslpasswd2 -a pmcd "pcptest" chown root:pcp "/etc/pcp/passwd.db" chmod 640 "/etc/pcp/passwd.db" fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 23 03:01:09 managed-node3 python3.9[29334]: ansible-ansible.legacy.stat Invoked with path=/etc/sasl2/pmcd.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:01:09 managed-node3 python3.9[29409]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sasl2/pmcd.conf _original_basename=pmcd.sasl2.conf.j2 recurse=False state=file path=/etc/sasl2/pmcd.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:10 managed-node3 python3.9[29558]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jun 23 03:01:11 managed-node3 python3.9[29709]: ansible-file Invoked with path=/etc/pcp/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:11 managed-node3 python3.9[29858]: ansible-file Invoked with path=/etc/pcp/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:11 managed-node3 python3.9[30007]: ansible-file Invoked with path=/etc/pcp/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:12 managed-node3 python3.9[30159]: ansible-file Invoked with path=/etc/pcp/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:12 managed-node3 python3.9[30308]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:12 managed-node3 
python3.9[30457]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:13 managed-node3 python3.9[30606]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:13 managed-node3 python3.9[30755]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:14 managed-node3 python3.9[30904]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcplistenoverflows follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:01:14 managed-node3 python3.9[30979]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcplistenoverflows _original_basename=tcplistenoverflows recurse=False state=file path=/etc/pcp/pmieconf/network/tcplistenoverflows force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:14 managed-node3 python3.9[31128]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldocookies follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:01:15 managed-node3 python3.9[31203]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldocookies _original_basename=tcpqfulldocookies recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldocookies force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:15 managed-node3 python3.9[31352]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldrops follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:01:15 managed-node3 python3.9[31427]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldrops _original_basename=tcpqfulldrops recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldrops force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None 
serole=None selevel=None setype=None attributes=None Jun 23 03:01:16 managed-node3 python3.9[31576]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/power/thermal_throttle follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:01:16 managed-node3 python3.9[31651]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/power/thermal_throttle _original_basename=thermal_throttle recurse=False state=file path=/etc/pcp/pmieconf/power/thermal_throttle force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:16 managed-node3 python3.9[31800]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/zeroconf/all_threads follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:01:16 managed-node3 python3.9[31875]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/zeroconf/all_threads _original_basename=all_threads recurse=False state=file path=/etc/pcp/pmieconf/zeroconf/all_threads force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:17 managed-node3 python3.9[32027]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/filesys/vfs_files follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:01:17 managed-node3 python3.9[32102]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/filesys/vfs_files _original_basename=vfs_files recurse=False state=file path=/etc/pcp/pmieconf/filesys/vfs_files force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:18 managed-node3 python3.9[32251]: ansible-lineinfile Invoked with state=absent path=/var/lib/pcp/config/pmie/config.default regexp=//.*global webhook_endpoint = "" backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None line=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:18 managed-node3 python3.9[32400]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcplistenoverflows dest=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:18 managed-node3 python3.9[32549]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldocookies dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies 
recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:19 managed-node3 python3.9[32698]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldrops dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:19 managed-node3 python3.9[32847]: ansible-file Invoked with src=/etc/pcp/pmieconf/power/thermal_throttle dest=/var/lib/pcp/config/pmieconf/power/thermal_throttle state=link force=True path=/var/lib/pcp/config/pmieconf/power/thermal_throttle recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:19 managed-node3 python3.9[32996]: ansible-file Invoked with src=/etc/pcp/pmieconf/zeroconf/all_threads dest=/var/lib/pcp/config/pmieconf/zeroconf/all_threads state=link force=True path=/var/lib/pcp/config/pmieconf/zeroconf/all_threads recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:20 managed-node3 python3.9[33145]: ansible-file Invoked with src=/etc/pcp/pmieconf/filesys/vfs_files dest=/var/lib/pcp/config/pmieconf/filesys/vfs_files state=link force=True path=/var/lib/pcp/config/pmieconf/filesys/vfs_files recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:20 managed-node3 python3.9[33294]: ansible-ansible.legacy.systemd Invoked with name=pmie state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jun 23 03:01:21 managed-node3 python3.9[33445]: ansible-lineinfile Invoked with path=/etc/pcp.conf regexp=^PCP_ARCHIVE_DIR= line=PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:21 managed-node3 python3.9[33594]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:01:22 managed-node3 python3.9[33672]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmlogger _original_basename=pmlogger.defaults.j2 recurse=False state=file 
path=/etc/sysconfig/pmlogger force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:22 managed-node3 python3.9[33821]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger_timers follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:01:22 managed-node3 python3.9[33896]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmlogger_timers _original_basename=pmlogger.timers.j2 recurse=False state=file path=/etc/sysconfig/pmlogger_timers force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:01:23 managed-node3 python3.9[34045]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jun 23 03:01:24 managed-node3 python3.9[34196]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jun 23 03:01:24 managed-node3 python3.9[34345]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jun 23 03:01:25 managed-node3 python3.9[34494]: ansible-ansible.legacy.command Invoked with _raw_params=systemctl is-system-running _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 23 03:01:25 managed-node3 python3.9[34644]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jun 23 03:01:27 managed-node3 python3.9[34797]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jun 23 03:01:27 managed-node3 python3.9[34948]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jun 23 03:01:27 managed-node3 systemd[1]: Reloading. Jun 23 03:01:27 managed-node3 systemd-rc-local-generator[34967]: /etc/rc.d/rc.local is not marked executable, skipping. Jun 23 03:01:28 managed-node3 systemd[1]: Starting firewalld - dynamic firewall daemon... ░░ Subject: A start job for unit firewalld.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has begun execution. ░░ ░░ The job identifier is 2049. 
Jun 23 03:01:28 managed-node3 systemd[1]: Started firewalld - dynamic firewall daemon. ░░ Subject: A start job for unit firewalld.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has finished successfully. ░░ ░░ The job identifier is 2049. Jun 23 03:01:28 managed-node3 kernel: Warning: Unmaintained driver is detected: ip_set Jun 23 03:01:28 managed-node3 kernel: Warning: Unmaintained driver is detected: ip_set_init Jun 23 03:01:29 managed-node3 python3.9[35175]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['44321/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jun 23 03:01:29 managed-node3 python3.9[35325]: ansible-ansible.legacy.command Invoked with _raw_params=systemctl restart pmcd && sleep 5 _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 23 03:01:29 managed-node3 systemd[1]: Stopping Performance Metrics Collector Daemon... ░░ Subject: A stop job for unit pmcd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmcd.service has begun execution. ░░ ░░ The job identifier is 2120. Jun 23 03:01:30 managed-node3 pmcd[19274]: Terminated Jun 23 03:01:30 managed-node3 pmcd[19204]: _pmda_setup: Interrupted! Jun 23 03:01:30 managed-node3 pmcd[19204]: _pmda_setup_cleanup: reset .NeedInstall for dm PMDA Jun 23 03:01:30 managed-node3 systemd[1]: pmcd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmcd.service has successfully entered the 'dead' state. Jun 23 03:01:30 managed-node3 systemd[1]: Stopped Performance Metrics Collector Daemon. ░░ Subject: A stop job for unit pmcd.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmcd.service has finished. ░░ ░░ The job identifier is 2120 and the job result is done. Jun 23 03:01:30 managed-node3 systemd[1]: pmcd.service: Consumed 1.730s CPU time. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmcd.service completed and consumed the indicated resources. Jun 23 03:01:30 managed-node3 systemd[1]: Starting Performance Metrics Collector Daemon... ░░ Subject: A start job for unit pmcd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has begun execution. ░░ ░░ The job identifier is 2120. Jun 23 03:01:30 managed-node3 systemd[1]: Started Performance Metrics Collector Daemon. ░░ Subject: A start job for unit pmcd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has finished successfully. ░░ ░░ The job identifier is 2120. Jun 23 03:01:30 managed-node3 pmcd[35746]: Installing dm PMDA ... 
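By this point the role has installed and started firewalld and opened pmcd's default port, 44321/tcp, both at runtime and permanently, before bouncing pmcd; the kernel's "Unmaintained driver" lines appear to be informational warnings emitted when firewalld loads the ip_set modules. A hedged equivalent of the port-opening step, using the collection's public firewall role rather than the internal firewall_lib module it invokes here:

- name: Open the pmcd port and restart the collector (sketch)
  hosts: managed-node3
  become: true
  tasks:
    - name: Open 44321/tcp, runtime and permanent
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.firewall
      vars:
        firewall:
          - port: 44321/tcp
            permanent: true
            runtime: true
            state: enabled

    - name: Restart pmcd and give the PMDA installs a moment to settle, as the test does
      ansible.builtin.shell: systemctl restart pmcd && sleep 5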
Jun 23 03:01:36 managed-node3 python3.9[35940]: ansible-ansible.legacy.command Invoked with _raw_params=id "pcptest" _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 23 03:01:36 managed-node3 python3.9[36090]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail sasldblistusers2 -f /etc/pcp/passwd.db | grep -wq "pcptest" _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 23 03:01:36 managed-node3 python3.9[36242]: ansible-ansible.legacy.command Invoked with _raw_params=pminfo -f -h "pcp://127.0.0.1?username=pcptest&password=t;dlen;dle" disk.dev.read _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 23 03:01:37 managed-node3 python3.9[36392]: ansible-ansible.legacy.command Invoked with _raw_params=firewall-cmd --list-ports _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 23 03:01:38 managed-node3 python3.9[36542]: ansible-service_facts Invoked Jun 23 03:01:41 managed-node3 python3.9[36775]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Jun 23 03:01:41 managed-node3 python3.9[36928]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Jun 23 03:01:42 managed-node3 python3.9[37078]: ansible-ansible.legacy.systemd Invoked with name=pmie state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Jun 23 03:01:42 managed-node3 python3.9[37228]: ansible-ansible.legacy.systemd Invoked with name=pmproxy state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Jun 23 03:01:43 managed-node3 python3.9[37378]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Jun 23 03:01:43 managed-node3 systemd[1]: Stopping firewalld - dynamic firewall daemon... ░░ Subject: A stop job for unit firewalld.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit firewalld.service has begun execution. ░░ ░░ The job identifier is 2185. Jun 23 03:01:43 managed-node3 systemd[1]: firewalld.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit firewalld.service has successfully entered the 'dead' state. Jun 23 03:01:43 managed-node3 systemd[1]: Stopped firewalld - dynamic firewall daemon. ░░ Subject: A stop job for unit firewalld.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit firewalld.service has finished. ░░ ░░ The job identifier is 2185 and the job result is done. 
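The verification phase restates the test's acceptance criteria: the pcptest account exists, it is registered in the SASL database, an authenticated pminfo fetch succeeds, and the firewall port is listed; afterwards the test restores service state (pmcd, pmlogger, and pmie running; pmproxy and firewalld stopped). The same checks as standalone tasks; the password literal is the throwaway test value visible in the log, not a real credential:

- name: Verify SASL-authenticated access to pmcd (sketch of the logged checks)
  hosts: managed-node3
  become: true
  tasks:
    - name: Confirm the pcptest system account exists
      ansible.builtin.command: id pcptest
      changed_when: false

    - name: Confirm pcptest is present in the SASL database
      ansible.builtin.shell: |
        set -euo pipefail
        sasldblistusers2 -f /etc/pcp/passwd.db | grep -wq "pcptest"
      changed_when: false

    - name: Fetch disk.dev.read over an authenticated connection
      ansible.builtin.command: pminfo -f -h "pcp://127.0.0.1?username=pcptest&password=t;dlen;dle" disk.dev.read
      changed_when: false

    - name: Confirm 44321/tcp shows up in the firewall
      ansible.builtin.command: firewall-cmd --list-ports
      changed_when: false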
Jun 23 03:01:44 managed-node3 python3.9[37579]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jun 23 03:01:45 managed-node3 python3.9[37754]: ansible-service_facts Invoked Jun 23 03:01:47 managed-node3 python3.9[37990]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jun 23 03:01:48 managed-node3 python3.9[38139]: ansible-ansible.legacy.dnf Invoked with name=['pcp-pmda-bpftrace', 'bpftrace'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jun 23 03:02:09 managed-node3 dbus-broker-launch[591]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically, ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Jun 23 03:02:09 managed-node3 dbus-broker-launch[591]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically, ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Jun 23 03:02:17 managed-node3 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-r823a6771c8b14ef19554dd8f343d2a71.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r823a6771c8b14ef19554dd8f343d2a71.service has finished successfully. ░░ ░░ The job identifier is 2188. Jun 23 03:02:17 managed-node3 systemd[1]: Starting man-db-cache-update.service...
░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 2253. Jun 23 03:02:22 managed-node3 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jun 23 03:02:22 managed-node3 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 2253. Jun 23 03:02:22 managed-node3 systemd[1]: run-r823a6771c8b14ef19554dd8f343d2a71.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r823a6771c8b14ef19554dd8f343d2a71.service has successfully entered the 'dead' state. Jun 23 03:02:25 managed-node3 python3.9[39489]: ansible-file Invoked with path=/etc/pcp/bpftrace state=directory mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:26 managed-node3 python3.9[39690]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/bpftrace/bpftrace.conf follow=True get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:02:27 managed-node3 python3.9[39853]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750662146.0718596-13023-220938206642182/.source.conf dest=/etc/pcp/bpftrace/bpftrace.conf mode=0600 follow=True _original_basename=bpftrace.conf.j2 checksum=9db3c77c06fbc635e71a256adf5840e978a74535 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:27 managed-node3 python3.9[40051]: ansible-ansible.legacy.dnf Invoked with name=['pcp', 'pcp-zeroconf'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jun 23 03:02:31 managed-node3 python3.9[40253]: ansible-ansible.legacy.dnf Invoked with name=['cyrus-sasl-lib', 'cyrus-sasl-scram'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False 
validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jun 23 03:02:32 managed-node3 python3.9[40452]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/pcp/pmcd/pmcd.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 23 03:02:33 managed-node3 python3.9[40651]: ansible-file Invoked with path=/var/lib/pcp/pmdas/bpftrace/.NeedInstall mode=u=rw,g=r,o=r state=touch recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:33 managed-node3 python3.9[40849]: ansible-file Invoked with path=/etc/pcp/labels state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:34 managed-node3 python3.9[41047]: ansible-file Invoked with path=/etc/pcp/labels/optional state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:35 managed-node3 python3.9[41245]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:02:35 managed-node3 python3.9[41345]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/ansible-managed _original_basename=pmcd.explicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:36 managed-node3 python3.9[41546]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/optional/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:02:36 managed-node3 python3.9[41646]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/optional/ansible-managed _original_basename=pmcd.implicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/optional/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:36 managed-node3 python3.9[41844]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmcd follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:02:37 managed-node3 
python3.9[41944]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmcd _original_basename=pmcd.defaults.j2 recurse=False state=file path=/etc/sysconfig/pmcd force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:38 managed-node3 python3.9[42142]: ansible-user Invoked with name=pcptest system=True state=present non_unique=False force=False remove=False create_home=True move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jun 23 03:02:38 managed-node3 python3.9[42342]: ansible-ansible.legacy.command Invoked with _raw_params=set -eu if set -o | grep -q pipefail; then set -o pipefail # pipefail not supported on debian, some ubuntu fi if ! sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^pcptest@"; then echo "Creating new pcptest user in /etc/pcp/passwd.db" echo "t;dlen;dle" | saslpasswd2 -a pmcd "pcptest" chown root:pcp "/etc/pcp/passwd.db" chmod 640 "/etc/pcp/passwd.db" fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 23 03:02:39 managed-node3 python3.9[42545]: ansible-ansible.legacy.stat Invoked with path=/etc/sasl2/pmcd.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:02:39 managed-node3 python3.9[42645]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sasl2/pmcd.conf _original_basename=pmcd.sasl2.conf.j2 recurse=False state=file path=/etc/sasl2/pmcd.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:40 managed-node3 python3.9[42843]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jun 23 03:02:40 managed-node3 systemd[1]: Stopping Performance Metrics Collector Daemon... ░░ Subject: A stop job for unit pmcd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmcd.service has begun execution. ░░ ░░ The job identifier is 2318. Jun 23 03:02:41 managed-node3 pmcd[35747]: Terminated Jun 23 03:02:41 managed-node3 pmcd[35701]: _pmda_setup: Interrupted! Jun 23 03:02:41 managed-node3 pmcd[35701]: _pmda_setup_cleanup: reset .NeedInstall for dm PMDA Jun 23 03:02:41 managed-node3 systemd[1]: pmcd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmcd.service has successfully entered the 'dead' state. 
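The _raw_params script above (also run earlier, at 03:01:09) is a multi-line shell snippet flattened by the log capture. Restored to its original shape, it is an idempotent guard that creates the SASL entry only when it is missing:

- name: Register pcptest with pmcd's SASL database (reconstruction of the flattened script)
  hosts: managed-node3
  become: true
  tasks:
    - name: Create the pcptest SASL user if absent
      ansible.builtin.shell: |
        set -eu
        if set -o | grep -q pipefail; then
          set -o pipefail  # pipefail not supported on debian, some ubuntu
        fi
        if ! sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^pcptest@"; then
          echo "Creating new pcptest user in /etc/pcp/passwd.db"
          echo "t;dlen;dle" | saslpasswd2 -a pmcd "pcptest"
          chown root:pcp "/etc/pcp/passwd.db"
          chmod 640 "/etc/pcp/passwd.db"
        fi

The ^pcptest@ pattern matches the user@realm form that sasldblistusers2 prints, so re-running the task is a no-op once the user exists.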
Jun 23 03:02:41 managed-node3 systemd[1]: Stopped Performance Metrics Collector Daemon. ░░ Subject: A stop job for unit pmcd.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmcd.service has finished. ░░ ░░ The job identifier is 2318 and the job result is done. Jun 23 03:02:41 managed-node3 systemd[1]: Starting Performance Metrics Collector Daemon... ░░ Subject: A start job for unit pmcd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has begun execution. ░░ ░░ The job identifier is 2318. Jun 23 03:02:41 managed-node3 systemd[1]: Started Performance Metrics Collector Daemon. ░░ Subject: A start job for unit pmcd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has finished successfully. ░░ ░░ The job identifier is 2318. Jun 23 03:02:41 managed-node3 pmcd[43304]: Installing bpftrace PMDA ... Jun 23 03:02:42 managed-node3 python3.9[43528]: ansible-file Invoked with path=/etc/pcp/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:42 managed-node3 python3.9[43726]: ansible-file Invoked with path=/etc/pcp/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:43 managed-node3 python3.9[43924]: ansible-file Invoked with path=/etc/pcp/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:44 managed-node3 python3.9[44122]: ansible-file Invoked with path=/etc/pcp/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:44 managed-node3 python3.9[44320]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:45 managed-node3 python3.9[44518]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False 
_original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:45 managed-node3 python3.9[44716]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:46 managed-node3 python3.9[44914]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:46 managed-node3 python3.9[45112]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcplistenoverflows follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:02:47 managed-node3 python3.9[45212]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcplistenoverflows _original_basename=tcplistenoverflows recurse=False state=file path=/etc/pcp/pmieconf/network/tcplistenoverflows force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:47 managed-node3 python3.9[45413]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldocookies follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:02:48 managed-node3 python3.9[45513]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldocookies _original_basename=tcpqfulldocookies recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldocookies force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:48 managed-node3 python3.9[45711]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldrops follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:02:49 managed-node3 python3.9[45811]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldrops _original_basename=tcpqfulldrops recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldrops force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:49 managed-node3 python3.9[46009]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/power/thermal_throttle follow=False get_checksum=True get_size=False checksum_algorithm=sha1 
get_mime=True get_attributes=True Jun 23 03:02:50 managed-node3 python3.9[46109]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/power/thermal_throttle _original_basename=thermal_throttle recurse=False state=file path=/etc/pcp/pmieconf/power/thermal_throttle force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:50 managed-node3 python3.9[46307]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/zeroconf/all_threads follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:02:50 managed-node3 python3.9[46407]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/zeroconf/all_threads _original_basename=all_threads recurse=False state=file path=/etc/pcp/pmieconf/zeroconf/all_threads force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:51 managed-node3 python3.9[46605]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/filesys/vfs_files follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 23 03:02:51 managed-node3 python3.9[46705]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/filesys/vfs_files _original_basename=vfs_files recurse=False state=file path=/etc/pcp/pmieconf/filesys/vfs_files force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:52 managed-node3 python3.9[46906]: ansible-lineinfile Invoked with state=absent path=/var/lib/pcp/config/pmie/config.default regexp=//.*global webhook_endpoint = "" backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None line=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:53 managed-node3 python3.9[47104]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcplistenoverflows dest=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 23 03:02:53 managed-node3 python3.9[47302]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldocookies dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None 
Jun 23 03:02:53 managed-node3 python3.9[47104]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcplistenoverflows dest=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:02:53 managed-node3 python3.9[47302]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldocookies dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:02:54 managed-node3 python3.9[47500]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldrops dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:02:54 managed-node3 python3.9[47698]: ansible-file Invoked with src=/etc/pcp/pmieconf/power/thermal_throttle dest=/var/lib/pcp/config/pmieconf/power/thermal_throttle state=link force=True path=/var/lib/pcp/config/pmieconf/power/thermal_throttle recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:02:55 managed-node3 python3.9[47896]: ansible-file Invoked with src=/etc/pcp/pmieconf/zeroconf/all_threads dest=/var/lib/pcp/config/pmieconf/zeroconf/all_threads state=link force=True path=/var/lib/pcp/config/pmieconf/zeroconf/all_threads recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:02:56 managed-node3 python3.9[48094]: ansible-file Invoked with src=/etc/pcp/pmieconf/filesys/vfs_files dest=/var/lib/pcp/config/pmieconf/filesys/vfs_files state=link force=True path=/var/lib/pcp/config/pmieconf/filesys/vfs_files recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
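The six ansible-file invocations above follow one pattern: each /etc/pcp/pmieconf rule is force-linked into the pmie working tree under /var/lib/pcp. A loop such as the following sketch would produce them; the paths are the ones shown in the log, the list variable name __pmie_rules is hypothetical:

- name: Ensure pmie rules are linked into /var/lib/pcp (sketch)
  ansible.builtin.file:
    src: "/etc/pcp/pmieconf/{{ item }}"
    dest: "/var/lib/pcp/config/pmieconf/{{ item }}"
    state: link
    force: true
  loop: "{{ __pmie_rules }}"
  vars:
    # Rule list taken from the six invocations above.
    __pmie_rules:
      - network/tcplistenoverflows
      - network/tcpqfulldocookies
      - network/tcpqfulldrops
      - power/thermal_throttle
      - zeroconf/all_threads
      - filesys/vfs_files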
Jun 23 03:02:56 managed-node3 python3.9[48292]: ansible-ansible.legacy.systemd Invoked with name=pmie state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Jun 23 03:02:56 managed-node3 systemd[1]: Stopping pmie farm service...
░░ Subject: A stop job for unit pmie_farm.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmie_farm.service has begun execution.
░░
░░ The job identifier is 2453.
Jun 23 03:02:56 managed-node3 systemd[1]: pmie_farm.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmie_farm.service has successfully entered the 'dead' state.
Jun 23 03:02:56 managed-node3 systemd[1]: Stopped pmie farm service.
░░ Subject: A stop job for unit pmie_farm.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmie_farm.service has finished.
░░
░░ The job identifier is 2453 and the job result is done.
Jun 23 03:02:56 managed-node3 systemd[1]: Stopping Performance Metrics Inference Engine...
░░ Subject: A stop job for unit pmie.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmie.service has begun execution.
░░
░░ The job identifier is 2383.
Jun 23 03:02:57 managed-node3 systemd[1]: pmie.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmie.service has successfully entered the 'dead' state.
Jun 23 03:02:57 managed-node3 systemd[1]: Stopped Performance Metrics Inference Engine.
░░ Subject: A stop job for unit pmie.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmie.service has finished.
░░
░░ The job identifier is 2383 and the job result is done.
Jun 23 03:02:57 managed-node3 systemd[1]: Starting Performance Metrics Inference Engine...
░░ Subject: A start job for unit pmie.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie.service has begun execution.
░░
░░ The job identifier is 2383.
Jun 23 03:02:57 managed-node3 systemd[1]: Started Performance Metrics Inference Engine.
░░ Subject: A start job for unit pmie.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie.service has finished successfully.
░░
░░ The job identifier is 2383.
Jun 23 03:02:57 managed-node3 systemd[1]: Starting pmie farm service...
░░ Subject: A start job for unit pmie_farm.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie_farm.service has begun execution.
░░
░░ The job identifier is 2453.
Jun 23 03:02:57 managed-node3 systemd[1]: Started pmie farm service.
░░ Subject: A start job for unit pmie_farm.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmie_farm.service has finished successfully.
░░
░░ The job identifier is 2453.
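The restart sequence above (pmie_farm stops first, pmie restarts, then pmie_farm starts again, since the status dump later in this log shows pmie has BindsTo on pmie_farm.service) is what a single systemd task produces. A minimal sketch using only the parameters from the module invocation:

- name: Ensure pmie is restarted and enabled on boot (sketch)
  ansible.builtin.systemd:
    name: pmie
    state: restarted
    enabled: true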
Jun 23 03:02:57 managed-node3 python3.9[49020]: ansible-lineinfile Invoked with path=/etc/pcp.conf regexp=^PCP_ARCHIVE_DIR= line=PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:02:58 managed-node3 python3.9[49218]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jun 23 03:02:58 managed-node3 python3.9[49318]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmlogger _original_basename=pmlogger.defaults.j2 recurse=False state=file path=/etc/sysconfig/pmlogger force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:02:59 managed-node3 python3.9[49516]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger_timers follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jun 23 03:02:59 managed-node3 python3.9[49616]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmlogger_timers _original_basename=pmlogger.timers.j2 recurse=False state=file path=/etc/sysconfig/pmlogger_timers force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jun 23 03:03:00 managed-node3 python3.9[49814]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
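The pmlogger setup above manages /etc/pcp.conf, templates the two sysconfig files (note the pmlogger.defaults.j2 and pmlogger.timers.j2 basenames), and starts the service. The archive-directory line, for example, is managed with lineinfile; a sketch restricted to the parameters visible in the invocation:

- name: Ensure PCP_ARCHIVE_DIR points at the pmlogger archives (sketch)
  ansible.builtin.lineinfile:
    path: /etc/pcp.conf
    regexp: '^PCP_ARCHIVE_DIR='
    line: PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger
    state: present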
Jun 23 03:03:01 managed-node3 python3.9[50014]: ansible-ansible.legacy.command Invoked with _raw_params=pmprobe -I pmcd.agent.status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jun 23 03:03:03 managed-node3 python3.9[50216]: ansible-ansible.legacy.command Invoked with _raw_params=pmprobe -I pmcd.agent.status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jun 23 03:03:04 managed-node3 python3.9[50415]: ansible-ansible.legacy.command Invoked with _raw_params=pmprobe -I pmcd.agent.status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jun 23 03:03:07 managed-node3 python3.9[50614]: ansible-ansible.legacy.command Invoked with _raw_params=pmprobe -I pmcd.agent.status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jun 23 03:03:08 managed-node3 python3.9[50816]: ansible-ansible.legacy.command Invoked with _raw_params=pmprobe -I pmcd.agent.status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jun 23 03:03:10 managed-node3 python3.9[51015]: ansible-ansible.legacy.command Invoked with _raw_params=pmprobe -I pmcd.agent.status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jun 23 03:03:11 managed-node3 python3.9[51214]: ansible-ansible.legacy.command Invoked with _raw_params=pmprobe -I pmcd.agent.status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jun 23 03:03:13 managed-node3 python3.9[51416]: ansible-ansible.legacy.command Invoked with _raw_params=pmprobe -I pmcd.agent.status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jun 23 03:03:15 managed-node3 python3.9[51615]: ansible-ansible.legacy.command Invoked with _raw_params=pmprobe -I pmcd.agent.status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jun 23 03:03:16 managed-node3 python3.9[51814]: ansible-ansible.legacy.command Invoked with _raw_params=pmprobe -I pmcd.agent.status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jun 23 03:03:18 managed-node3 python3.9[52016]: ansible-ansible.legacy.command Invoked with _raw_params=pmprobe -I pmcd.agent.status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
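The repeated pmprobe -I pmcd.agent.status invocations above are a retry loop waiting for the bpftrace agent to appear in pmcd's agent list (the first invocation at 03:03:01 may be a separate initial query; the failing task below reports 'attempts': 10, and its final stdout lists nine agents with no bpftrace among them). A sketch of such a polling task; the register name, delay, and exact until condition are assumptions:

- name: Wait for the bpftrace agent to register with pmcd (sketch)
  ansible.builtin.command: pmprobe -I pmcd.agent.status
  register: __agent_status          # hypothetical variable name
  retries: 10                       # the failed task reports 'attempts': 10
  delay: 1                          # assumed; the log shows 1-3 s between attempts
  until: "'bpftrace' in __agent_status.stdout"
  changed_when: false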
Jun 23 03:03:18 managed-node3 python3.9[52215]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex echo '##################' echo List of SELinux AVCs - note list may be empty grep type=AVC /var/log/audit/audit.log echo '##################' ls -alrtF /run if [ -d /run/pcp ]; then ls -alrtF /run/pcp else echo ERROR - /run/pcp does not exist fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
##################
List of SELinux AVCs - note list may be empty
##################
total 40
dr-xr-xr-x. 18 root root 235 Jun  3 05:25 ../
drwxr-xr-x.  2 root root 60 Jun 23 02:56 tmpfiles.d/
drwxr-xr-x.  3 root root 60 Jun 23 02:56 log/
drwxr-xr-x.  2 root root 40 Jun 23 02:56 mount/
drwxr-xr-x.  4 root root 100 Jun 23 02:56 initramfs/
-r--r--r--.  1 root root 33 Jun 23 02:56 machine-id
srw-rw-rw-.  1 root root 0 Jun 23 02:56 rpcbind.sock=
prw-------.  1 root root 0 Jun 23 02:56 initctl|
drwxr-xr-x.  5 root root 100 Jun 23 02:56 credentials/
drwx------.  2 root root 40 Jun 23 02:56 cryptsetup/
drwxr-xr-x.  2 root root 40 Jun 23 02:56 setrans/
drwxr-xr-x.  2 root root 40 Jun 23 02:56 sepermit/
drwxr-xr-x.  2 root root 40 Jun 23 02:56 motd.d/
drwxr-xr-x.  2 root root 40 Jun 23 02:56 faillock/
drwxr-xr-x.  2 root root 40 Jun 23 02:56 console/
drwx--x--x.  3 root root 60 Jun 23 02:56 sudo/
-rw-r--r--.  1 root root 0 Jun 23 02:56 motd
drwxr-xr-x.  3 root root 60 Jun 23 02:56 tpm2-tss/
drwx------.  2 rpc  rpc  60 Jun 23 02:56 rpcbind/
-rw-r--r--.  1 root root 4 Jun 23 02:56 auditd.pid
drwxr-xr-x.  2 root root 60 Jun 23 02:56 dbus/
srw-rw-rw-.  1 root root 0 Jun 23 02:56 .heim_org.h5l.kcm-socket=
drwxr-xr-x.  2 root root 60 Jun 23 02:56 irqbalance/
-rw-r--r--.  1 root root 4 Jun 23 02:56 dhclient.pid
-rw-r--r--.  1 root root 628 Jun 23 02:56 dhclient.lease
-rw-------.  1 root root 4 Jun 23 02:56 gssproxy.pid
srw-rw-rw-.  1 root root 0 Jun 23 02:56 gssproxy.sock=
drwxr-xr-x.  2 root root 60 Jun 23 02:56 chrony-dhcp/
drwxr-x---.  2 chrony chrony 80 Jun 23 02:56 chrony/
-rw-------.  1 root root 4 Jun 23 02:56 sm-notify.pid
drwxr-xr-x.  3 root root 80 Jun 23 02:56 lock/
-rw-r--r--.  1 root root 4 Jun 23 02:56 crond.pid
-rw-------.  1 root root 3 Jun 23 02:56 rsyslogd.pid
----------.  1 root root 0 Jun 23 02:56 cron.reboot
-rw-r--r--.  1 root root 4 Jun 23 02:56 sshd.pid
drwx------.  3 root root 360 Jun 23 02:56 cloud-init/
drwxr-xr-x.  2 root root 80 Jun 23 02:56 blkid/
-rw-------.  1 root root 0 Jun 23 02:56 agetty.reload
drwxr-xr-x.  3 root root 60 Jun 23 02:58 user/
drwxr-xr-x.  6 root root 160 Jun 23 02:58 NetworkManager/
drwxr-xr-x. 22 root root 560 Jun 23 03:01 systemd/
drwxr-xr-x. 29 root root 920 Jun 23 03:01 ./
drwxr-x---.  2 root root 40 Jun 23 03:01 firewalld/
drwxr-xr-x.  7 root root 160 Jun 23 03:01 udev/
drwxrwxr-x.  2 pcp  pcp  160 Jun 23 03:02 pcp/
-rw-rw-r--.  1 root utmp 1920 Jun 23 03:03 utmp
total 12
-r--r--r--.  1 pcp  pcp  5 Jun 23 03:00 pmlogger.pid
lrwxrwxrwx.  1 pcp  pcp  30 Jun 23 03:00 pmlogger.primary.socket -> /run/pcp/pmlogger.25964.socket=
srw-rw-rw-.  1 pcp  pcp  0 Jun 23 03:00 pmlogger.25964.socket=
drwxr-xr-x. 29 root root 920 Jun 23 03:01 ../
srw-rw-rw-.  1 root root 0 Jun 23 03:02 pmcd.socket=
-r--r--r--.  1 root root 5 Jun 23 03:02 pmcd.pid
-r--r--r--.  1 pcp  pcp  5 Jun 23 03:02 pmie.pid
drwxrwxr-x.  2 pcp  pcp  160 Jun 23 03:02 ./

TASK [Reraise error] ***********************************************************
task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml:17
Monday 23 June 2025 03:03:19 -0400 (0:00:00.657) 0:01:34.817 ***********
fatal: [managed-node3]: FAILED!
=> { "changed": false } MSG: {'changed': False, 'stdout': 'pmcd.agent.status 9 "root" "pmcd" "proc" "pmproxy" "xfs" "linux" "mmv" "kvm" "jbd2"', 'stderr': '', 'rc': 0, 'cmd': ['pmprobe', '-I', 'pmcd.agent.status'], 'start': '2025-06-23 03:03:18.205064', 'end': '2025-06-23 03:03:18.213144', 'delta': '0:00:00.008080', 'msg': '', 'invocation': {'module_args': {'_raw_params': 'pmprobe -I pmcd.agent.status', '_uses_shell': False, 'expand_argument_vars': True, 'stdin_add_newline': True, 'strip_empty_ends': True, 'argv': None, 'chdir': None, 'executable': None, 'creates': None, 'removes': None, 'stdin': None}}, 'stdout_lines': ['pmcd.agent.status 9 "root" "pmcd" "proc" "pmproxy" "xfs" "linux" "mmv" "kvm" "jbd2"'], 'stderr_lines': [], '_ansible_no_log': False, 'failed': True, 'attempts': 10} TASK [Get final state of services] ********************************************* task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:3 Monday 23 June 2025 03:03:19 -0400 (0:00:00.025) 0:01:34.843 *********** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "avahi-daemon.service": { "name": "avahi-daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": 
"cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "fstrim.service": 
{ "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "network.service": { "name": "network.service", "source": "systemd", "state": 
"stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcp-reboot-init.service": { "name": "pcp-reboot-init.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pmcd.service": { "name": "pmcd.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmfind.service": { "name": "pmfind.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pmie.service": { "name": "pmie.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmie_check.service": { "name": "pmie_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_daily.service": { "name": "pmie_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_farm.service": { "name": "pmie_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmie_farm_check.service": { "name": "pmie_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger.service": { "name": "pmlogger.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmlogger_check.service": { "name": "pmlogger_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_daily.service": { "name": "pmlogger_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_farm.service": { "name": "pmlogger_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmlogger_farm_check.service": { "name": "pmlogger_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmproxy.service": { "name": "pmproxy.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": 
"systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "redis.service": { "name": "redis.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", 
"status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": 
"systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": 
"systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles.service": { "name": "systemd-tmpfiles.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", 
"status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "zabbix-agent.service": { "name": "zabbix-agent.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [Restore state of services] *********************************************** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:9 Monday 23 June 2025 03:03:20 -0400 (0:00:01.896) 0:01:36.740 *********** ok: [managed-node3] => (item=pmcd) => { "ansible_loop_var": "item", "changed": false, "item": "pmcd", "name": "pmcd", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Mon 2025-06-23 03:02:41 EDT", "ActiveEnterTimestampMonotonic": "385019330", "ActiveExitTimestamp": "Mon 2025-06-23 03:02:40 EDT", "ActiveExitTimestampMonotonic": "384225625", "ActiveState": "active", "After": "systemd-journald.socket system.slice network-online.target pcp-reboot-init.service basic.target avahi-daemon.service sysinit.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Mon 2025-06-23 03:02:41 EDT", "AssertTimestampMonotonic": "384897724", "Before": "pmie.service pmlogger.service multi-user.target pmproxy.service zabbix-agent.service shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "816385000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Mon 2025-06-23 03:02:41 EDT", "ConditionTimestampMonotonic": "384897720", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/pmcd.service", "ControlGroupId": "5371", "ControlPID": "0", "CoredumpFilter": "0x33", 
"DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Collector Daemon", "DevicePolicy": "auto", "Documentation": "\"man:pmcd(1)\"", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "43204", "ExecMainStartTimestamp": "Mon 2025-06-23 03:02:41 EDT", "ExecMainStartTimestampMonotonic": "385019296", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd start-systemd ; ignore_errors=no ; start_time=[Mon 2025-06-23 03:02:41 EDT] ; stop_time=[n/a] ; pid=43130 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd start-systemd ; flags= ; start_time=[Mon 2025-06-23 03:02:41 EDT] ; stop_time=[n/a] ; pid=43130 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd stop-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd stop-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pmcd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pmcd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Mon 2025-06-23 03:02:41 EDT", "InactiveEnterTimestampMonotonic": "384897060", "InactiveExitTimestamp": "Mon 2025-06-23 03:02:41 EDT", "InactiveExitTimestampMonotonic": "384901425", "InvocationID": "9e0fe1545f8f4c5b846bd45a49817eab", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13688", "LimitNPROCSoft": "13688", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13688", "LimitSIGPENDINGSoft": "13688", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "43204", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", 
"MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "13918208", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pmcd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/pcp/pmcd.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Mon 2025-06-23 03:02:41 EDT", "StateChangeTimestampMonotonic": "385019330", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "10", "TasksMax": "21900", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "pmie.service multi-user.target pmlogger.service", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } ok: [managed-node3] => (item=pmlogger) => { "ansible_loop_var": "item", "changed": false, "item": "pmlogger", "name": "pmlogger", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Mon 2025-06-23 03:00:53 EDT", "ActiveEnterTimestampMonotonic": "276533087", "ActiveExitTimestamp": "Mon 2025-06-23 03:00:52 EDT", "ActiveExitTimestampMonotonic": "275811394", "ActiveState": "active", "After": "systemd-journald.socket pmcd.service basic.target sysinit.target system.slice pcp-reboot-init.service 
network-online.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Mon 2025-06-23 03:00:52 EDT", "AssertTimestampMonotonic": "275889370", "Before": "multi-user.target pmlogger_daily.timer pmlogger_farm.service pmlogger_check.timer shutdown.target", "BindsTo": "pmlogger_daily.timer pmlogger_farm.service pmlogger_check.timer", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "1305571000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Mon 2025-06-23 03:00:52 EDT", "ConditionTimestampMonotonic": "275889367", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pmlogger_farm.service", "ControlGroup": "/system.slice/pmlogger.service", "ControlGroupId": "4947", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Archive Logger", "DevicePolicy": "auto", "Documentation": "\"man:pmlogger(1)\"", "DynamicUser": "no", "Environment": "PMLOGGER_CHECK_PARAMS=--only-primary", "EnvironmentFiles": "/etc/sysconfig/pmlogger (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "25964", "ExecMainStartTimestamp": "Mon 2025-06-23 03:00:53 EDT", "ExecMainStartTimestampMonotonic": "276533053", "ExecMainStatus": "0", "ExecStart": "{ path=/etc/pcp/pmlogger/rc ; argv[]=/etc/pcp/pmlogger/rc start-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/etc/pcp/pmlogger/rc ; argv[]=/etc/pcp/pmlogger/rc start-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/etc/pcp/pmlogger/rc ; argv[]=/etc/pcp/pmlogger/rc stop-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/etc/pcp/pmlogger/rc ; argv[]=/etc/pcp/pmlogger/rc stop-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pmlogger.service", "FreezerState": "running", "GID": "996", "Group": "pcp", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", 
"IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pmlogger.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Mon 2025-06-23 03:00:52 EDT", "InactiveEnterTimestampMonotonic": "275888345", "InactiveExitTimestamp": "Mon 2025-06-23 03:00:52 EDT", "InactiveExitTimestampMonotonic": "275893367", "InvocationID": "124674f0665f4f1f96a11a28632b1038", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13688", "LimitNPROCSoft": "13688", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13688", "LimitSIGPENDINGSoft": "13688", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "25964", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "3710976", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pmlogger.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/pcp/pmlogger.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": 
"system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Mon 2025-06-23 03:00:53 EDT", "StateChangeTimestampMonotonic": "276533087", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "21900", "TimeoutAbortUSec": "2min", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "2min", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "2min", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "996", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "User": "pcp", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "pmcd.service", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } ok: [managed-node3] => (item=pmie) => { "ansible_loop_var": "item", "changed": false, "item": "pmie", "name": "pmie", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Mon 2025-06-23 03:02:57 EDT", "ActiveEnterTimestampMonotonic": "400610274", "ActiveExitTimestamp": "Mon 2025-06-23 03:02:56 EDT", "ActiveExitTimestampMonotonic": "400148426", "ActiveState": "active", "After": "pcp-reboot-init.service network-online.target pmcd.service basic.target sysinit.target system.slice systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Mon 2025-06-23 03:02:57 EDT", "AssertTimestampMonotonic": "400391103", "Before": "shutdown.target pmie_farm.service pmie_check.timer multi-user.target pmie_daily.timer", "BindsTo": "pmie_farm.service pmie_check.timer pmie_daily.timer", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "300419000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Mon 2025-06-23 03:02:57 EDT", "ConditionTimestampMonotonic": "400391100", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", 
"ConsistsOf": "pmie_farm.service", "ControlGroup": "/system.slice/pmie.service", "ControlGroupId": "5410", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Inference Engine", "DevicePolicy": "auto", "Documentation": "\"man:pmie(1)\"", "DynamicUser": "no", "Environment": "PMIE_CHECK_PARAMS=--only-primary", "EnvironmentFiles": "/etc/sysconfig/pmie (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "48681", "ExecMainStartTimestamp": "Mon 2025-06-23 03:02:57 EDT", "ExecMainStartTimestampMonotonic": "400610240", "ExecMainStatus": "0", "ExecStart": "{ path=/etc/pcp/pmie/rc ; argv[]=/etc/pcp/pmie/rc start-systemd ; ignore_errors=no ; start_time=[Mon 2025-06-23 03:02:57 EDT] ; stop_time=[n/a] ; pid=48497 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/etc/pcp/pmie/rc ; argv[]=/etc/pcp/pmie/rc start-systemd ; flags= ; start_time=[Mon 2025-06-23 03:02:57 EDT] ; stop_time=[n/a] ; pid=48497 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/etc/pcp/pmie/rc ; argv[]=/etc/pcp/pmie/rc stop-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/etc/pcp/pmie/rc ; argv[]=/etc/pcp/pmie/rc stop-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pmie.service", "FreezerState": "running", "GID": "996", "Group": "pcp", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pmie.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Mon 2025-06-23 03:02:57 EDT", "InactiveEnterTimestampMonotonic": "400390313", "InactiveExitTimestamp": "Mon 2025-06-23 03:02:57 EDT", "InactiveExitTimestampMonotonic": "400398387", "InvocationID": "74696234351d430787ca249cc998e728", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13688", "LimitNPROCSoft": "13688", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13688", "LimitSIGPENDINGSoft": "13688", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", 
"LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "48681", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "1830912", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pmie.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/pcp/pmie.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Mon 2025-06-23 03:02:57 EDT", "StateChangeTimestampMonotonic": "400610274", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "21900", "TimeoutAbortUSec": "2min", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "2min", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "2min", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "996", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "User": "pcp", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "pmcd.service", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } ok: [managed-node3] => (item=pmproxy) => { "ansible_loop_var": "item", "changed": false, "item": "pmproxy", "name": "pmproxy", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "network-online.target 
sysinit.target system.slice systemd-journald.socket basic.target redis.service pcp-reboot-init.service pmcd.service avahi-daemon.service", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "0", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Proxy for Performance Metrics Collector Daemon", "DevicePolicy": "auto", "Documentation": "\"man:pmproxy(1)\"", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/etc/pcp/pmproxy/rc ; argv[]=/etc/pcp/pmproxy/rc start-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/etc/pcp/pmproxy/rc ; argv[]=/etc/pcp/pmproxy/rc start-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pmproxy.service", "FreezerState": "running", "GID": "[not set]", "Group": "pcp", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pmproxy.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", 
"LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13688", "LimitNPROCSoft": "13688", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13688", "LimitSIGPENDINGSoft": "13688", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "Markers": "needs-restart", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pmproxy.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Mon 2025-06-23 03:00:19 EDT", "StateChangeTimestampMonotonic": "242727633", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21900", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", 
"TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "User": "pcp", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } skipping: [managed-node3] => (item=redis) => { "ansible_loop_var": "item", "changed": false, "false_condition": "initial_state.ansible_facts.services[item + '.service']['status'] != 'not-found'", "item": "redis", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=valkey) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in final_state.ansible_facts.services", "item": "valkey", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=grafana-server) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in final_state.ansible_facts.services", "item": "grafana-server", "skip_reason": "Conditional result was False" } TASK [Stop firewall] *********************************************************** task path: /tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:29 Monday 23 June 2025 03:03:23 -0400 (0:00:02.812) 0:01:39.553 *********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_manage_firewall | bool", "skip_reason": "Conditional result was False" } PLAY RECAP ********************************************************************* managed-node3 : ok=59 changed=6 unreachable=0 failed=1 skipped=33 rescued=1 ignored=0 SYSTEM ROLES ERRORS BEGIN v1 [ { "ansible_version": "2.17.12", "attempts": 10, "delta": "0:00:00.008080", "end_time": "2025-06-23 03:03:18.213144", "host": "managed-node3", "message": "", "rc": 0, "start_time": "2025-06-23 03:03:18.205064", "stdout": "pmcd.agent.status 9 \"root\" \"pmcd\" \"proc\" \"pmproxy\" \"xfs\" \"linux\" \"mmv\" \"kvm\" \"jbd2\"", "task_name": "Check if bpftrace pmda is registered", "task_path": "/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/check_bpftrace.yml:3" }, { "ansible_version": "2.17.12", "end_time": "2025-06-23T07:03:19.038147+00:00Z", "host": "managed-node3", "message": { "_ansible_no_log": false, "attempts": 10, "changed": false, "cmd": [ "pmprobe", "-I", "pmcd.agent.status" ], "delta": "0:00:00.008080", "end": "2025-06-23 03:03:18.213144", "failed": true, "invocation": { "module_args": { "_raw_params": "pmprobe -I pmcd.agent.status", "_uses_shell": false, "argv": null, "chdir": null, "creates": null, "executable": null, "expand_argument_vars": true, "removes": null, "stdin": null, "stdin_add_newline": true, "strip_empty_ends": true } }, "msg": "", "rc": 0, "start": "2025-06-23 03:03:18.205064", "stderr": "", "stderr_lines": [], "stdout": "pmcd.agent.status 9 \"root\" \"pmcd\" \"proc\" \"pmproxy\" \"xfs\" \"linux\" \"mmv\" \"kvm\" \"jbd2\"", "stdout_lines": [ "pmcd.agent.status 9 \"root\" \"pmcd\" \"proc\" \"pmproxy\" \"xfs\" \"linux\" \"mmv\" \"kvm\" \"jbd2\"" ] }, "start_time": "2025-06-23T07:03:19.021913+00:00Z", "task_name": "Reraise error", "task_path": "/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml:17" } ] SYSTEM ROLES ERRORS END v1 TASKS RECAP ******************************************************************** Monday 23 June 2025 03:03:23 -0400 (0:00:00.012) 0:01:39.565 *********** 
===============================================================================
fedora.linux_system_roles.private_metrics_subrole_bpftrace : Install needed bpftrace metrics packages -- 37.08s
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:41
Check if bpftrace pmda is registered ----------------------------------- 17.06s
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/check_bpftrace.yml:3
fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rules are installed for targeted hosts --- 5.44s
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:24
fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra rules symlinks have been created for targeted hosts --- 3.36s
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:86
fedora.linux_system_roles.private_metrics_subrole_pcp : Install Performance Co-Pilot packages --- 3.33s
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:27
Restore state of services ----------------------------------------------- 2.81s
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:9
fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rule group directories exist --- 2.30s
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:4
fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rule group link directories exist --- 2.30s
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:14
fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector is restarted and enabled on boot --- 1.98s
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:117
Get final state of services --------------------------------------------- 1.90s
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:3
Get initial state of services ------------------------------------------- 1.79s
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/get_services_state.yml:3
fedora.linux_system_roles.private_metrics_subrole_pcp : Install authentication packages --- 1.50s
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:33
fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric inference is restarted and enabled on boot --- 1.26s
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:127
fedora.linux_system_roles.private_metrics_subrole_bpftrace : Ensure PCP bpftrace agent is configured --- 1.17s
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_bpftrace/tasks/main.yml:61
Gathering Facts --------------------------------------------------------- 1.09s
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_bpftrace.yml:9
fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric logging is configured --- 1.01s
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:12
fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric logging retention period is set --- 0.96s
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:19
fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector is configured --- 0.96s
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:53
fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector authentication is configured --- 0.95s
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:86
fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure any explicit metric labels are configured --- 0.93s
/tmp/collections-qED/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:39
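Note on the failure recorded above: the PLAY RECAP shows failed=1 rescued=1 because the "Check if bpftrace pmda is registered" task retried ten times, yet the pmprobe output captured in the SYSTEM ROLES ERRORS block only ever listed nine agents ("root" "pmcd" "proc" "pmproxy" "xfs" "linux" "mmv" "kvm" "jbd2") with no "bpftrace" entry, so the rescue path in handle_test_failure.yml re-raised the error. The registration check can be reproduced by hand with a short playbook. The sketch below is a hypothetical reconstruction, not the actual contents of check_bpftrace.yml: the retry count matches the ten attempts recorded above, while the success condition, the five-second delay, and the play layout are assumptions.

    # check_bpftrace_repro.yml -- hypothetical reproduction of the registration
    # check; the real test lives in tests/metrics/check_bpftrace.yml and may
    # differ in its exact condition and timing.
    - name: Verify the bpftrace PMDA registered with pmcd
      hosts: all
      tasks:
        - name: Poll pmcd's agent list until bpftrace shows up
          ansible.builtin.command: pmprobe -I pmcd.agent.status
          register: pmprobe_out
          # The failing run above never saw "bpftrace" among the instance
          # names, even after all ten attempts.
          until: "'bpftrace' in pmprobe_out.stdout"
          retries: 10
          delay: 5          # assumed; the log does not record the retry delay
          changed_when: false

Running this against the managed node (ansible-playbook -i <inventory> check_bpftrace_repro.yml), together with a look at the pmcd log under /var/log/pcp/pmcd/ on the host, should indicate whether the bpftrace PMDA install or its registration with pmcd is the step that went wrong.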