ansible-playbook [core 2.17.5]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-kpy
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.6 (main, Sep 9 2024, 00:00:00) [GCC 11.5.0 20240719 (Red Hat 11.5.0-2)] (/usr/bin/python3.12)
  jinja version = 3.1.4
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
statically imported: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/tests/metrics/get_services_state.yml
statically imported: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_verify_query.yml ***********************************************
2 plays in /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_query.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_query.yml:5
Wednesday 09 October 2024 02:04:56 -0400 (0:00:00.011) 0:00:00.011 *****
ok: [managed-node3] => {
    "ansible_facts": {
        "pcptest_pw": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n65343431623161346664373330646165636437656265656632613961363839303132393064663934\n3137396633373562393466633037356533326566343338350a386238333034336162333932313162\n62643937336534356131376134303463306466316433366636643562633637376336653034646334\n3063663466333735390a333330366461386166633233373133326237323663333831653232646566\n3363\n"
        }
    },
    "ansible_included_var_files": [
        "/tmp/metrics-kug/tests/vars/vault-variables.yml"
    ],
    "changed": false
}

PLAY [Test the role with query service enabled] ********************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_query.yml:9
Wednesday 09 October 2024 02:04:56 -0400 (0:00:00.036) 0:00:00.048 *****
[WARNING]: Platform linux on host managed-node3 is using the discovered Python
interpreter at /usr/bin/python3.9, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
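The interpreter-discovery warning above is harmless for this run, but it can be avoided by pinning the interpreter instead of relying on discovery. A minimal sketch, assuming inventory-level group vars (the file location is illustrative):

    # inventory/group_vars/all.yml -- hypothetical location
    # Pin the interpreter so a later Python install on the managed node
    # cannot change which interpreter Ansible uses.
    ansible_python_interpreter: /usr/bin/python3.9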
ok: [managed-node3]

TASK [End test] ****************************************************************
task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_query.yml:15
Wednesday 09 October 2024 02:04:57 -0400 (0:00:01.232) 0:00:01.280 *****
META: end_host conditional evaluated to False, continuing execution for managed-node3
skipping: [managed-node3] => {
    "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3"
}
MSG:
end_host conditional evaluated to false, continuing execution for managed-node3

TASK [Get initial state of services] *******************************************
task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/tests/metrics/get_services_state.yml:3
Wednesday 09 October 2024 02:04:57 -0400 (0:00:00.063) 0:00:01.343 *****
ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "avahi-daemon.service": { "name": "avahi-daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" },
"dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grafana-server.service": { "name": "grafana-server.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": 
"stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcp-reboot-init.service": { "name": "pcp-reboot-init.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pmcd.service": { "name": "pmcd.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmfind.service": { "name": "pmfind.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pmie.service": { "name": "pmie.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmie_check.service": { "name": "pmie_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_daily.service": { "name": "pmie_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_farm.service": { "name": "pmie_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmie_farm_check.service": { "name": "pmie_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger.service": { "name": "pmlogger.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmlogger_check.service": { "name": "pmlogger_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_daily.service": { "name": "pmlogger_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_farm.service": { "name": "pmlogger_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmlogger_farm_check.service": { "name": "pmlogger_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmproxy.service": { "name": "pmproxy.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "redis.service": { "name": "redis.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "snapd.seeded.service": { "name": "snapd.seeded.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": 
"not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, 
"systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": 
"systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles.service": { "name": "systemd-tmpfiles.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "zabbix-agent.service": { "name": "zabbix-agent.service", "source": "systemd", 
"state": "stopped", "status": "not-found" } } }, "changed": false } TASK [Run the role] ************************************************************ task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_query.yml:25 Wednesday 09 October 2024 02:04:59 -0400 (0:00:01.811) 0:00:03.154 ***** included: fedora.linux_system_roles.metrics for managed-node3 TASK [fedora.linux_system_roles.metrics : Ensure ansible_facts used by role] *** task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:3 Wednesday 09 October 2024 02:04:59 -0400 (0:00:00.045) 0:00:03.199 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__metrics_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add Elasticsearch to metrics domain list] *** task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:8 Wednesday 09 October 2024 02:04:59 -0400 (0:00:00.021) 0:00:03.221 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_elasticsearch | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add SQL Server to metrics domain list] *** task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:13 Wednesday 09 October 2024 02:04:59 -0400 (0:00:00.019) 0:00:03.241 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_mssql | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add Postfix to metrics domain list] *** task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:18 Wednesday 09 October 2024 02:04:59 -0400 (0:00:00.026) 0:00:03.267 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_postfix | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add bpftrace to metrics domain list] *** task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:23 Wednesday 09 October 2024 02:04:59 -0400 (0:00:00.026) 0:00:03.293 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_bpftrace | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Setup metrics access for roles] ****** task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:28 Wednesday 09 October 2024 02:04:59 -0400 (0:00:00.019) 0:00:03.313 ***** ok: [managed-node3] => { "ansible_facts": { "__metrics_accounts": [ { "saslpassword": "metrics", "sasluser": "metrics", "user": "metrics" } ] }, "changed": false } TASK [Configure Elasticsearch metrics] ***************************************** task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:35 Wednesday 09 October 2024 02:04:59 -0400 (0:00:00.026) 0:00:03.340 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_elasticsearch | d(false) | bool or metrics_into_elasticsearch | d(false) | bool\n", "skip_reason": "Conditional result was False" } TASK [Configure SQL Server metrics.] 
TASK [Configure SQL Server metrics.] *******************************************
task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:50
Wednesday 09 October 2024 02:04:59 -0400 (0:00:00.022) 0:00:03.362 *****
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "metrics_from_mssql | d(false) | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Configure Postfix metrics.] **********************************************
task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:58
Wednesday 09 October 2024 02:04:59 -0400 (0:00:00.019) 0:00:03.382 *****
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "metrics_from_postfix | d(false) | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Setup bpftrace metrics.] *************************************************
task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:66
Wednesday 09 October 2024 02:04:59 -0400 (0:00:00.019) 0:00:03.401 *****
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "metrics_from_bpftrace | d(false) | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Setup metric querying service.] ******************************************
task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:75
Wednesday 09 October 2024 02:05:00 -0400 (0:00:00.019) 0:00:03.420 *****
included: fedora.linux_system_roles.private_metrics_subrole_keyserver for managed-node3

TASK [fedora.linux_system_roles.private_metrics_subrole_keyserver : Set platform/version specific variables] ***
task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:4
Wednesday 09 October 2024 02:05:00 -0400 (0:00:00.045) 0:00:03.465 *****
ok: [managed-node3] => (item=/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/default.yml) => {
    "ansible_facts": {
        "__keyserver_conf_link": "/etc"
    },
    "ansible_included_var_files": [
        "/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/default.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/default.yml"
}
skipping: [managed-node3] => (item=/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/RedHat.yml) => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "item is file",
    "item": "/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/RedHat_x86_64.yml) => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "item is file",
    "item": "/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/RedHat_x86_64.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS.yml) => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "item is file",
    "item": "/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_x86_64.yml) => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "item is file",
    "item": "/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_x86_64.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_9.yml) => {
    "ansible_facts": {
        "__keyserver_conf_file": "redis.conf",
        "__keyserver_conf_path": "/etc/redis",
        "__keyserver_loaded_modules": [],
        "__keyserver_name": "redis",
        "__keyserver_packages": [
            "redis"
        ],
        "__keyserver_packages_extra": []
    },
    "ansible_included_var_files": [
        "/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_9.yml"
}
skipping: [managed-node3] => (item=/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_9_x86_64.yml) => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "item is file",
    "item": "/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_9_x86_64.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_9.yml) => {
    "ansible_facts": {
        "__keyserver_conf_file": "redis.conf",
        "__keyserver_conf_path": "/etc/redis",
        "__keyserver_loaded_modules": [],
        "__keyserver_name": "redis",
        "__keyserver_packages": [
            "redis"
        ],
        "__keyserver_packages_extra": []
    },
    "ansible_included_var_files": [
        "/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_9.yml"
}
skipping: [managed-node3] => (item=/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_9_x86_64.yml) => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "item is file",
    "item": "/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/vars/CentOS_9_x86_64.yml",
    "skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.private_metrics_subrole_keyserver : Check if system is ostree] ***
task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:22
Wednesday 09 October 2024 02:05:00 -0400 (0:00:00.066) 0:00:03.532 *****
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.private_metrics_subrole_keyserver : Set flag to indicate system is ostree] ***
task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:27
Wednesday 09 October 2024 02:05:00 -0400 (0:00:00.497) 0:00:04.029 *****
ok: [managed-node3] => {
    "ansible_facts": {
        "__ansible_pcp_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.private_metrics_subrole_keyserver : Install key server packages] ***
task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:31
Wednesday 09 October 2024 02:05:00 -0400 (0:00:00.038) 0:00:04.068 *****
changed: [managed-node3] => {
    "changed": true,
    "rc": 0,
    "results": [
        "Installed: redis-6.2.7-1.el9.x86_64"
    ]
}

TASK [fedora.linux_system_roles.private_metrics_subrole_keyserver : Ensure key server configuration directory exists] ***
task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:37
Wednesday 09 October 2024 02:05:03 -0400 (0:00:02.837) 0:00:06.905 *****
ok: [managed-node3] => {
    "changed": false,
    "gid": 0,
    "group": "root",
    "mode": "0750",
    "owner": "redis",
    "path": "/etc/redis",
    "secontext": "system_u:object_r:redis_conf_t:s0",
    "size": 45,
    "state": "directory",
    "uid": 990
}

TASK [fedora.linux_system_roles.private_metrics_subrole_keyserver : Ensure key server is configured] ***
task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/tasks/main.yml:46
Wednesday 09 October 2024 02:05:03 -0400 (0:00:00.458) 0:00:07.364 *****
An exception occurred during task execution. To see the full traceback, use -vvv. The error was: ansible.errors.AnsibleUndefinedVariable: 'redis_save_to_disk' is undefined
failed: [managed-node3] (item=/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/templates/CentOS_9_keyserver.conf.j2) => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "/tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_keyserver/templates/CentOS_9_keyserver.conf.j2"
}
MSG:
AnsibleUndefinedVariable: 'redis_save_to_disk' is undefined
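This is the root cause of the test failure: the CentOS_9_keyserver.conf.j2 template references a redis_save_to_disk variable that neither of the vars files loaded above (default.yml, CentOS_9.yml) defines. One defensive fix, assuming the role is free to choose a fallback, is a role default so the template always renders; the value shown is an assumption, not the role's actual choice:

    # roles/private_metrics_subrole_keyserver/defaults/main.yml -- sketch
    # Fallback for CentOS_9_keyserver.conf.j2; the chosen value is illustrative.
    redis_save_to_disk: true

Equivalently, the template itself could supply an inline default, e.g. {{ redis_save_to_disk | d(true) }}.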
TASK [Handle failure case] *****************************************************
task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_query.yml:44
Wednesday 09 October 2024 02:05:04 -0400 (0:00:00.136) 0:00:07.500 *****
included: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml for managed-node3

TASK [Collect logs] ************************************************************
task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml:2
Wednesday 09 October 2024 02:05:04 -0400 (0:00:00.047) 0:00:07.548 *****
ok: [managed-node3] => {
    "changed": false,
    "cmd": "journalctl -ex\necho '##################'\necho List of SELinux AVCs - note list may be empty\ngrep type=AVC /var/log/audit/audit.log\necho '##################'\nls -alrtF /run\nif [ -d /run/pcp ]; then\n ls -alrtF /run/pcp\nelse\n echo ERROR - /run/pcp does not exist\nfi\n",
    "delta": "0:00:00.043433",
    "end": "2024-10-09 02:05:04.578706",
    "rc": 0,
    "start": "2024-10-09 02:05:04.535273"
}

STDOUT:
Oct 09 01:55:50 localhost systemd[1]: Finished Load Kernel Module configfs. ░░ Subject: A start job for unit modprobe@configfs.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has finished successfully. ░░ ░░ The job identifier is 136. Oct 09 01:55:50 localhost systemd[1]: Mounting Kernel Configuration File System... ░░ Subject: A start job for unit sys-kernel-config.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-kernel-config.mount has begun execution. ░░ ░░ The job identifier is 135. Oct 09 01:55:50 localhost systemd[1]: Mounted Kernel Configuration File System. ░░ Subject: A start job for unit sys-kernel-config.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-kernel-config.mount has finished successfully. ░░ ░░ The job identifier is 135. Oct 09 01:55:50 localhost kernel: fuse: init (API version 7.36) Oct 09 01:55:50 localhost systemd[1]: modprobe@fuse.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@fuse.service has successfully entered the 'dead' state. Oct 09 01:55:50 localhost systemd[1]: Finished Load Kernel Module fuse. ░░ Subject: A start job for unit modprobe@fuse.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@fuse.service has finished successfully. ░░ ░░ The job identifier is 164. Oct 09 01:55:50 localhost systemd[1]: Mounting FUSE Control File System... ░░ Subject: A start job for unit sys-fs-fuse-connections.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-fs-fuse-connections.mount has begun execution. ░░ ░░ The job identifier is 163. Oct 09 01:55:50 localhost systemd[1]: Mounted FUSE Control File System. ░░ Subject: A start job for unit sys-fs-fuse-connections.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-fs-fuse-connections.mount has finished successfully. ░░ ░░ The job identifier is 163. Oct 09 01:55:50 localhost systemd[1]: Finished Create Static Device Nodes in /dev. ░░ Subject: A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully. ░░ ░░ The job identifier is 127. Oct 09 01:55:50 localhost systemd[1]: Reached target Preparation for Local File Systems. ░░ Subject: A start job for unit local-fs-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit local-fs-pre.target has finished successfully. ░░ ░░ The job identifier is 133. Oct 09 01:55:50 localhost systemd[1]: Reached target Local File Systems. ░░ Subject: A start job for unit local-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit local-fs.target has finished successfully. ░░ ░░ The job identifier is 131. Oct 09 01:55:50 localhost systemd[1]: Rebuild Dynamic Linker Cache was skipped because no trigger condition checks were met.
░░ Subject: A start job for unit ldconfig.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit ldconfig.service has finished successfully. ░░ ░░ The job identifier is 179. Oct 09 01:55:50 localhost systemd[1]: Mark the need to relabel after reboot was skipped because of an unmet condition check (ConditionSecurity=!selinux). ░░ Subject: A start job for unit selinux-autorelabel-mark.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit selinux-autorelabel-mark.service has finished successfully. ░░ ░░ The job identifier is 161. Oct 09 01:55:50 localhost systemd[1]: Set Up Additional Binary Formats was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-binfmt.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-binfmt.service has finished successfully. ░░ ░░ The job identifier is 176. Oct 09 01:55:50 localhost systemd[1]: Update Boot Loader Random Seed was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-boot-random-seed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-random-seed.service has finished successfully. ░░ ░░ The job identifier is 166. Oct 09 01:55:50 localhost systemd[1]: Starting Automatic Boot Loader Update... ░░ Subject: A start job for unit systemd-boot-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-update.service has begun execution. ░░ ░░ The job identifier is 171. Oct 09 01:55:50 localhost systemd[1]: Starting Commit a transient machine-id on disk... ░░ Subject: A start job for unit systemd-machine-id-commit.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-machine-id-commit.service has begun execution. ░░ ░░ The job identifier is 129. Oct 09 01:55:50 localhost systemd[1]: Starting Create Volatile Files and Directories... ░░ Subject: A start job for unit systemd-tmpfiles-setup.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup.service has begun execution. ░░ ░░ The job identifier is 168. Oct 09 01:55:50 localhost systemd[1]: Starting Rule-based Manager for Device Events and Files... ░░ Subject: A start job for unit systemd-udevd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udevd.service has begun execution. ░░ ░░ The job identifier is 155. Oct 09 01:55:50 localhost bootctl[492]: Couldn't find EFI system partition, skipping. Oct 09 01:55:50 localhost systemd[1]: Finished Automatic Boot Loader Update. ░░ Subject: A start job for unit systemd-boot-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-update.service has finished successfully. ░░ ░░ The job identifier is 171. Oct 09 01:55:50 localhost systemd-udevd[495]: Using default interface naming scheme 'rhel-9.0'. 
Oct 09 01:55:50 localhost kernel: ACPI: bus type drm_connector registered Oct 09 01:55:50 localhost systemd[1]: modprobe@drm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@drm.service has successfully entered the 'dead' state. Oct 09 01:55:50 localhost systemd[1]: Finished Load Kernel Module drm. ░░ Subject: A start job for unit modprobe@drm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@drm.service has finished successfully. ░░ ░░ The job identifier is 209. Oct 09 01:55:50 localhost systemd[1]: etc-machine\x2did.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit etc-machine\x2did.mount has successfully entered the 'dead' state. Oct 09 01:55:50 localhost systemd[1]: Finished Commit a transient machine-id on disk. ░░ Subject: A start job for unit systemd-machine-id-commit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-machine-id-commit.service has finished successfully. ░░ ░░ The job identifier is 129. Oct 09 01:55:50 localhost systemd[1]: Finished Create Volatile Files and Directories. ░░ Subject: A start job for unit systemd-tmpfiles-setup.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup.service has finished successfully. ░░ ░░ The job identifier is 168. Oct 09 01:55:50 localhost systemd[1]: Mounting RPC Pipe File System... ░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution. ░░ ░░ The job identifier is 241. Oct 09 01:55:50 localhost systemd[1]: Starting Security Auditing Service... ░░ Subject: A start job for unit auditd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has begun execution. ░░ ░░ The job identifier is 206. Oct 09 01:55:50 localhost systemd[1]: Starting RPC Bind... ░░ Subject: A start job for unit rpcbind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpcbind.service has begun execution. ░░ ░░ The job identifier is 217. Oct 09 01:55:50 localhost systemd[1]: Rebuild Journal Catalog was skipped because of an unmet condition check (ConditionNeedsUpdate=/var). ░░ Subject: A start job for unit systemd-journal-catalog-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-catalog-update.service has finished successfully. ░░ ░░ The job identifier is 147. Oct 09 01:55:50 localhost systemd[1]: Update is Completed was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-update-done.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-done.service has finished successfully. ░░ ░░ The job identifier is 122. Oct 09 01:55:50 localhost kernel: RPC: Registered named UNIX socket transport module. 
Oct 09 01:55:50 localhost kernel: RPC: Registered udp transport module. Oct 09 01:55:50 localhost kernel: RPC: Registered tcp transport module. Oct 09 01:55:50 localhost kernel: RPC: Registered tcp-with-tls transport module. Oct 09 01:55:50 localhost kernel: RPC: Registered tcp NFSv4.1 backchannel transport module. Oct 09 01:55:50 localhost systemd[1]: Mounted RPC Pipe File System. ░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully. ░░ ░░ The job identifier is 241. Oct 09 01:55:50 localhost systemd[1]: Reached target rpc_pipefs.target. ░░ Subject: A start job for unit rpc_pipefs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc_pipefs.target has finished successfully. ░░ ░░ The job identifier is 240. Oct 09 01:55:50 localhost systemd[1]: Started Rule-based Manager for Device Events and Files. ░░ Subject: A start job for unit systemd-udevd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udevd.service has finished successfully. ░░ ░░ The job identifier is 155. Oct 09 01:55:50 localhost systemd[1]: Starting Load Kernel Module configfs... ░░ Subject: A start job for unit modprobe@configfs.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has begun execution. ░░ ░░ The job identifier is 261. Oct 09 01:55:50 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@configfs.service has successfully entered the 'dead' state. Oct 09 01:55:50 localhost systemd[1]: Finished Load Kernel Module configfs. ░░ Subject: A start job for unit modprobe@configfs.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has finished successfully. ░░ ░░ The job identifier is 261. Oct 09 01:55:50 localhost systemd[1]: Condition check resulted in /dev/ttyS0 being skipped. ░░ Subject: A start job for unit dev-ttyS0.device has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dev-ttyS0.device has finished successfully. ░░ ░░ The job identifier is 228. Oct 09 01:55:51 localhost systemd[1]: Started RPC Bind. ░░ Subject: A start job for unit rpcbind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpcbind.service has finished successfully. ░░ ░░ The job identifier is 217. Oct 09 01:55:51 localhost auditd[533]: No plugins found, not dispatching events Oct 09 01:55:51 localhost auditd[533]: Init complete, auditd 3.1.5 listening for events (startup state enable) Oct 09 01:55:51 localhost kernel: input: PC Speaker as /devices/platform/pcspkr/input/input5 Oct 09 01:55:51 localhost systemd-udevd[506]: Network interface NamePolicy= disabled on kernel command line. 
Oct 09 01:55:51 localhost kernel: RAPL PMU: API unit is 2^-32 Joules, 0 fixed counters, 655360 ms ovfl timer Oct 09 01:55:51 localhost kernel: cirrus 0000:00:02.0: vgaarb: deactivate vga console Oct 09 01:55:51 localhost kernel: Console: switching to colour dummy device 80x25 Oct 09 01:55:51 localhost kernel: [drm] Initialized cirrus 2.0.0 2019 for 0000:00:02.0 on minor 0 Oct 09 01:55:51 localhost kernel: fbcon: cirrusdrmfb (fb0) is primary device Oct 09 01:55:51 localhost kernel: Console: switching to colour frame buffer device 128x48 Oct 09 01:55:51 localhost kernel: cirrus 0000:00:02.0: [drm] fb0: cirrusdrmfb frame buffer device Oct 09 01:55:51 localhost kernel: piix4_smbus 0000:00:01.3: SMBus base address uninitialized - upgrade BIOS or use force_addr=0xaddr Oct 09 01:55:52 localhost augenrules[536]: /sbin/augenrules: No change Oct 09 01:55:52 localhost augenrules[569]: No rules Oct 09 01:55:52 localhost augenrules[569]: enabled 1 Oct 09 01:55:52 localhost augenrules[569]: failure 1 Oct 09 01:55:52 localhost augenrules[569]: pid 533 Oct 09 01:55:52 localhost augenrules[569]: rate_limit 0 Oct 09 01:55:52 localhost augenrules[569]: backlog_limit 8192 Oct 09 01:55:52 localhost augenrules[569]: lost 0 Oct 09 01:55:52 localhost augenrules[569]: backlog 2 Oct 09 01:55:52 localhost augenrules[569]: backlog_wait_time 60000 Oct 09 01:55:52 localhost augenrules[569]: backlog_wait_time_actual 0 Oct 09 01:55:52 localhost augenrules[569]: enabled 1 Oct 09 01:55:52 localhost augenrules[569]: failure 1 Oct 09 01:55:52 localhost augenrules[569]: pid 533 Oct 09 01:55:52 localhost augenrules[569]: rate_limit 0 Oct 09 01:55:52 localhost augenrules[569]: backlog_limit 8192 Oct 09 01:55:52 localhost augenrules[569]: lost 0 Oct 09 01:55:52 localhost augenrules[569]: backlog 4 Oct 09 01:55:52 localhost augenrules[569]: backlog_wait_time 60000 Oct 09 01:55:52 localhost augenrules[569]: backlog_wait_time_actual 0 Oct 09 01:55:52 localhost augenrules[569]: enabled 1 Oct 09 01:55:52 localhost augenrules[569]: failure 1 Oct 09 01:55:52 localhost augenrules[569]: pid 533 Oct 09 01:55:52 localhost augenrules[569]: rate_limit 0 Oct 09 01:55:52 localhost augenrules[569]: backlog_limit 8192 Oct 09 01:55:52 localhost augenrules[569]: lost 0 Oct 09 01:55:52 localhost augenrules[569]: backlog 4 Oct 09 01:55:52 localhost augenrules[569]: backlog_wait_time 60000 Oct 09 01:55:52 localhost augenrules[569]: backlog_wait_time_actual 0 Oct 09 01:55:52 localhost systemd[1]: Started Security Auditing Service. ░░ Subject: A start job for unit auditd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has finished successfully. ░░ ░░ The job identifier is 206. Oct 09 01:55:52 localhost systemd[1]: Starting Record System Boot/Shutdown in UTMP... ░░ Subject: A start job for unit systemd-update-utmp.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp.service has begun execution. ░░ ░░ The job identifier is 214. Oct 09 01:55:52 localhost systemd[1]: Finished Record System Boot/Shutdown in UTMP. ░░ Subject: A start job for unit systemd-update-utmp.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp.service has finished successfully. ░░ ░░ The job identifier is 214. Oct 09 01:55:52 localhost systemd[1]: Reached target System Initialization. 
░░ Subject: A start job for unit sysinit.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sysinit.target has finished successfully. ░░ ░░ The job identifier is 120. Oct 09 01:55:52 localhost systemd[1]: Started dnf makecache --timer. ░░ Subject: A start job for unit dnf-makecache.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dnf-makecache.timer has finished successfully. ░░ ░░ The job identifier is 191. Oct 09 01:55:52 localhost systemd[1]: Started Daily rotation of log files. ░░ Subject: A start job for unit logrotate.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.timer has finished successfully. ░░ ░░ The job identifier is 197. Oct 09 01:55:52 localhost systemd[1]: Started Daily Cleanup of Temporary Directories. ░░ Subject: A start job for unit systemd-tmpfiles-clean.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-clean.timer has finished successfully. ░░ ░░ The job identifier is 190. Oct 09 01:55:52 localhost systemd[1]: Reached target Timer Units. ░░ Subject: A start job for unit timers.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit timers.target has finished successfully. ░░ ░░ The job identifier is 189. Oct 09 01:55:52 localhost systemd[1]: Listening on D-Bus System Message Bus Socket. ░░ Subject: A start job for unit dbus.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus.socket has finished successfully. ░░ ░░ The job identifier is 182. Oct 09 01:55:52 localhost systemd[1]: Listening on SSSD Kerberos Cache Manager responder socket. ░░ Subject: A start job for unit sssd-kcm.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sssd-kcm.socket has finished successfully. ░░ ░░ The job identifier is 183. Oct 09 01:55:52 localhost systemd[1]: Reached target Socket Units. ░░ Subject: A start job for unit sockets.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sockets.target has finished successfully. ░░ ░░ The job identifier is 181. Oct 09 01:55:52 localhost systemd[1]: Starting D-Bus System Message Bus... ░░ Subject: A start job for unit dbus-broker.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus-broker.service has begun execution. ░░ ░░ The job identifier is 195. Oct 09 01:55:52 localhost systemd[1]: TPM2 PCR Barrier (Initialization) was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f). ░░ Subject: A start job for unit systemd-pcrphase-sysinit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pcrphase-sysinit.service has finished successfully. ░░ ░░ The job identifier is 146. Oct 09 01:55:53 localhost systemd[1]: Started D-Bus System Message Bus. 
░░ Subject: A start job for unit dbus-broker.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus-broker.service has finished successfully. ░░ ░░ The job identifier is 195. Oct 09 01:55:53 localhost systemd[1]: Reached target Basic System. ░░ Subject: A start job for unit basic.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit basic.target has finished successfully. ░░ ░░ The job identifier is 117. Oct 09 01:55:53 localhost dbus-broker-lau[577]: Ready Oct 09 01:55:53 localhost systemd[1]: Starting NTP client/server... ░░ Subject: A start job for unit chronyd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit chronyd.service has begun execution. ░░ ░░ The job identifier is 210. Oct 09 01:55:53 localhost systemd[1]: Starting Initial cloud-init job (pre-networking)... ░░ Subject: A start job for unit cloud-init-local.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init-local.service has begun execution. ░░ ░░ The job identifier is 222. Oct 09 01:55:53 localhost systemd[1]: Starting Restore /run/initramfs on shutdown... ░░ Subject: A start job for unit dracut-shutdown.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-shutdown.service has begun execution. ░░ ░░ The job identifier is 175. Oct 09 01:55:53 localhost systemd[1]: Started irqbalance daemon. ░░ Subject: A start job for unit irqbalance.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit irqbalance.service has finished successfully. ░░ ░░ The job identifier is 218. Oct 09 01:55:53 localhost systemd[1]: Load CPU microcode update was skipped because of an unmet condition check (ConditionPathExists=/sys/devices/system/cpu/microcode/reload). ░░ Subject: A start job for unit microcode.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit microcode.service has finished successfully. ░░ ░░ The job identifier is 180. Oct 09 01:55:53 localhost systemd[1]: Started Hardware RNG Entropy Gatherer Daemon. ░░ Subject: A start job for unit rngd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rngd.service has finished successfully. ░░ ░░ The job identifier is 215. Oct 09 01:55:53 localhost systemd[1]: OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully. ░░ ░░ The job identifier is 201. Oct 09 01:55:53 localhost systemd[1]: OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). 
░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ed25519.service has finished successfully. ░░ ░░ The job identifier is 204. Oct 09 01:55:53 localhost systemd[1]: OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@rsa.service has finished successfully. ░░ ░░ The job identifier is 203. Oct 09 01:55:53 localhost systemd[1]: Reached target sshd-keygen.target. ░░ Subject: A start job for unit sshd-keygen.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen.target has finished successfully. ░░ ░░ The job identifier is 200. Oct 09 01:55:53 localhost systemd[1]: System Security Services Daemon was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit sssd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sssd.service has finished successfully. ░░ ░░ The job identifier is 245. Oct 09 01:55:53 localhost systemd[1]: Reached target User and Group Name Lookups. ░░ Subject: A start job for unit nss-user-lookup.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nss-user-lookup.target has finished successfully. ░░ ░░ The job identifier is 246. Oct 09 01:55:53 localhost systemd[1]: Starting User Login Management... ░░ Subject: A start job for unit systemd-logind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has begun execution. ░░ ░░ The job identifier is 207. Oct 09 01:55:53 localhost systemd[1]: Starting Rotate log files... ░░ Subject: A start job for unit logrotate.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has begun execution. ░░ ░░ The job identifier is 267. Oct 09 01:55:53 localhost systemd[1]: Finished Restore /run/initramfs on shutdown. ░░ Subject: A start job for unit dracut-shutdown.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-shutdown.service has finished successfully. ░░ ░░ The job identifier is 175. Oct 09 01:55:53 localhost /usr/sbin/irqbalance[582]: libcap-ng used by "/usr/sbin/irqbalance" failed dropping bounding set due to not having CAP_SETPCAP in capng_apply Oct 09 01:55:53 localhost systemd-logind[584]: New seat seat0. ░░ Subject: A new seat seat0 is now available ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new seat seat0 has been configured and is now available. 
Oct 09 01:55:53 localhost systemd-logind[584]: Watching system buttons on /dev/input/event0 (Power Button) Oct 09 01:55:53 localhost systemd-logind[584]: Watching system buttons on /dev/input/event1 (Sleep Button) Oct 09 01:55:53 localhost systemd-logind[584]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard) Oct 09 01:55:53 localhost systemd[1]: Started User Login Management. ░░ Subject: A start job for unit systemd-logind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has finished successfully. ░░ ░░ The job identifier is 207. Oct 09 01:55:54 localhost systemd[1]: logrotate.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit logrotate.service has successfully entered the 'dead' state. Oct 09 01:55:54 localhost systemd[1]: Finished Rotate log files. ░░ Subject: A start job for unit logrotate.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has finished successfully. ░░ ░░ The job identifier is 267. Oct 09 01:55:54 localhost chronyd[601]: chronyd version 4.6 starting (+CMDMON +NTP +REFCLOCK +RTC +PRIVDROP +SCFILTER +SIGND +ASYNCDNS +NTS +SECHASH +IPV6 +DEBUG) Oct 09 01:55:54 localhost chronyd[601]: Loaded 0 symmetric keys Oct 09 01:55:54 localhost rngd[583]: Disabling 7: PKCS11 Entropy generator (pkcs11) Oct 09 01:55:54 localhost rngd[583]: Disabling 5: NIST Network Entropy Beacon (nist) Oct 09 01:55:54 localhost rngd[583]: Disabling 9: Qrypt quantum entropy beacon (qrypt) Oct 09 01:55:54 localhost rngd[583]: Initializing available sources Oct 09 01:55:54 localhost rngd[583]: [hwrng ]: Initialization Failed Oct 09 01:55:54 localhost rngd[583]: [rdrand]: Enabling RDRAND rng support Oct 09 01:55:54 localhost rngd[583]: [rdrand]: Initialized Oct 09 01:55:54 localhost rngd[583]: [jitter]: JITTER timeout set to 5 sec Oct 09 01:55:54 localhost rngd[583]: [jitter]: Initializing AES buffer Oct 09 01:55:54 localhost chronyd[601]: Using right/UTC timezone to obtain leap second data Oct 09 01:55:54 localhost chronyd[601]: Frequency 0.000 +/- 1000000.000 ppm read from /var/lib/chrony/drift Oct 09 01:55:54 localhost chronyd[601]: Loaded seccomp filter (level 2) Oct 09 01:55:54 localhost systemd[1]: Started NTP client/server. ░░ Subject: A start job for unit chronyd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit chronyd.service has finished successfully. ░░ ░░ The job identifier is 210. Oct 09 01:55:59 localhost rngd[583]: [jitter]: Unable to obtain AES key, disabling JITTER source Oct 09 01:55:59 localhost rngd[583]: [jitter]: Initialization Failed Oct 09 01:55:59 localhost rngd[583]: [namedpipe]: Initialization Failed Oct 09 01:55:59 localhost rngd[583]: Process privileges have been dropped to 2:2 Oct 09 01:56:01 localhost cloud-init[607]: Cloud-init v. 23.4-19.el9 running 'init-local' at Wed, 09 Oct 2024 05:56:01 +0000. Up 38.80 seconds. Oct 09 01:56:02 localhost dhclient[610]: Internet Systems Consortium DHCP Client 4.4.2b1 Oct 09 01:56:02 localhost dhclient[610]: Copyright 2004-2019 Internet Systems Consortium. Oct 09 01:56:02 localhost dhclient[610]: All rights reserved. 
Oct 09 01:56:02 localhost dhclient[610]: For info, please visit https://www.isc.org/software/dhcp/
Oct 09 01:56:02 localhost dhclient[610]:
Oct 09 01:56:02 localhost dhclient[610]: Listening on LPF/eth0/0e:b5:d4:56:f4:53
Oct 09 01:56:02 localhost dhclient[610]: Sending on LPF/eth0/0e:b5:d4:56:f4:53
Oct 09 01:56:02 localhost dhclient[610]: Sending on Socket/fallback
Oct 09 01:56:02 localhost dhclient[610]: DHCPDISCOVER on eth0 to 255.255.255.255 port 67 interval 6 (xid=0xf52afe6e)
Oct 09 01:56:02 localhost dhclient[610]: DHCPOFFER of 10.31.43.227 from 10.31.40.1
Oct 09 01:56:02 localhost dhclient[610]: DHCPREQUEST for 10.31.43.227 on eth0 to 255.255.255.255 port 67 (xid=0xf52afe6e)
Oct 09 01:56:02 localhost dhclient[610]: DHCPACK of 10.31.43.227 from 10.31.40.1 (xid=0xf52afe6e)
Oct 09 01:56:02 localhost dhclient[610]: bound to 10.31.43.227 -- renewal in 1517 seconds.
Oct 09 01:56:02 localhost systemd[1]: Starting Hostname Service...
░░ Subject: A start job for unit systemd-hostnamed.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit systemd-hostnamed.service has begun execution.
░░
░░ The job identifier is 331.
Oct 09 01:56:02 localhost systemd[1]: Started Hostname Service.
░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit systemd-hostnamed.service has finished successfully.
░░
░░ The job identifier is 331.
Oct 09 01:56:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd-hostnamed[625]: Hostname set to <ip-10-31-43-227.us-east-1.aws.redhat.com> (static)
Oct 09 01:56:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Finished Initial cloud-init job (pre-networking).
░░ Subject: A start job for unit cloud-init-local.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit cloud-init-local.service has finished successfully.
░░
░░ The job identifier is 222.
Oct 09 01:56:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Reached target Preparation for Network.
░░ Subject: A start job for unit network-pre.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit network-pre.target has finished successfully.
░░
░░ The job identifier is 178.
Oct 09 01:56:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager...
░░ Subject: A start job for unit NetworkManager.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager.service has begun execution.
░░
░░ The job identifier is 194.
Oct 09 01:56:03 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453363.6289] NetworkManager (version 1.51.0-1.el9) is starting... (boot:09a0688e-33b9-4d56-bb3c-f14dff446fcc)
Oct 09 01:56:03 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453363.6292] Read config: /etc/NetworkManager/NetworkManager.conf (run: 15-carrier-timeout.conf)
Oct 09 01:56:03 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453363.7636] manager[0x555c42206080]: monitoring kernel firmware directory '/lib/firmware'.
Oct 09 01:56:03 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453363.7659] hostname: hostname: using hostnamed Oct 09 01:56:03 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453363.7659] hostname: static hostname changed from (none) to "ip-10-31-43-227.us-east-1.aws.redhat.com" Oct 09 01:56:03 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453363.7672] dns-mgr: init: dns=default,systemd-resolved rc-manager=symlink (auto) Oct 09 01:56:03 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453363.8251] manager[0x555c42206080]: rfkill: Wi-Fi hardware radio set enabled Oct 09 01:56:03 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453363.8252] manager[0x555c42206080]: rfkill: WWAN hardware radio set enabled Oct 09 01:56:03 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453363.8391] Loaded device plugin: NMTeamFactory (/usr/lib64/NetworkManager/1.51.0-1.el9/libnm-device-plugin-team.so) Oct 09 01:56:03 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453363.8391] manager: rfkill: Wi-Fi enabled by radio killswitch; enabled by state file Oct 09 01:56:03 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453363.8396] manager: rfkill: WWAN enabled by radio killswitch; enabled by state file Oct 09 01:56:03 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453363.8396] manager: Networking is enabled by state file Oct 09 01:56:03 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453363.8434] settings: Loaded settings plugin: keyfile (internal) Oct 09 01:56:03 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Listening on Load/Save RF Kill Switch Status /dev/rfkill Watch. ░░ Subject: A start job for unit systemd-rfkill.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-rfkill.socket has finished successfully. ░░ ░░ The job identifier is 396. Oct 09 01:56:03 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 403. 
Oct 09 01:56:03 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453363.9642] settings: Loaded settings plugin: ifcfg-rh ("/usr/lib64/NetworkManager/1.51.0-1.el9/libnm-settings-plugin-ifcfg-rh.so")
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 0 affinity: Input/output error
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: IRQ 0 affinity is now unmanaged
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 48 affinity: Input/output error
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: IRQ 48 affinity is now unmanaged
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 49 affinity: Input/output error
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: IRQ 49 affinity is now unmanaged
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 50 affinity: Input/output error
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: IRQ 50 affinity is now unmanaged
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 51 affinity: Input/output error
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: IRQ 51 affinity is now unmanaged
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 52 affinity: Input/output error
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: IRQ 52 affinity is now unmanaged
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 53 affinity: Input/output error
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: IRQ 53 affinity is now unmanaged
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 54 affinity: Input/output error
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: IRQ 54 affinity is now unmanaged
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 55 affinity: Input/output error
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: IRQ 55 affinity is now unmanaged
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 56 affinity: Input/output error
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: IRQ 56 affinity is now unmanaged
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 57 affinity: Input/output error
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: IRQ 57 affinity is now unmanaged
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 58 affinity: Input/output error
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: IRQ 58 affinity is now unmanaged
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 59 affinity: Input/output error
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com irqbalance[582]: IRQ 59 affinity is now unmanaged
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0782] Warning: the ifcfg-rh plugin is deprecated, please migrate connections to the keyfile format using "nmcli connection migrate"
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0790] dhcp: init: Using DHCP client 'internal'
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0793] manager: (lo): new Loopback device (/org/freedesktop/NetworkManager/Devices/1)
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0803] device (lo): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0809] device (lo): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0815] device (lo): Activation: starting connection 'lo' (aacac247-44a9-41bd-b248-932262bf45b1)
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0821] manager: (eth0): new Ethernet device (/org/freedesktop/NetworkManager/Devices/2)
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0825] device (eth0): state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external')
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Started Network Manager.
░░ Subject: A start job for unit NetworkManager.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager.service has finished successfully.
░░
░░ The job identifier is 194.
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0853] bus-manager: acquired D-Bus service "org.freedesktop.NetworkManager"
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0857] device (lo): state change: disconnected -> prepare (reason 'none', managed-type: 'external')
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0859] device (lo): state change: prepare -> config (reason 'none', managed-type: 'external')
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0861] device (lo): state change: config -> ip-config (reason 'none', managed-type: 'external')
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0863] device (eth0): carrier: link connected
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0870] device (lo): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0883] device (eth0): state change: unavailable -> disconnected (reason 'carrier-changed', managed-type: 'full')
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0889] policy: auto-activating connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03)
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0892] device (eth0): Activation: starting connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03)
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0893] device (eth0): state change: disconnected -> prepare (reason 'none', managed-type: 'full')
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0895] manager: NetworkManager state is now CONNECTING
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0896] device (eth0): state change: prepare -> config (reason 'none', managed-type: 'full')
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0902] device (eth0): state change: config -> ip-config (reason 'none', managed-type: 'full')
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0905] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds)
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Reached target Network.
░░ Subject: A start job for unit network.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit network.target has finished successfully.
░░
░░ The job identifier is 196.
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0949] dhcp4 (eth0): state changed new lease, address=10.31.43.227
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.0952] policy: set 'System eth0' (eth0) as default for IPv4 routing and DNS
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager Wait Online...
░░ Subject: A start job for unit NetworkManager-wait-online.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-wait-online.service has begun execution.
░░
░░ The job identifier is 193.
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Starting GSSAPI Proxy Daemon...
░░ Subject: A start job for unit gssproxy.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit gssproxy.service has begun execution.
░░
░░ The job identifier is 238.
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.1082] device (eth0): state change: ip-config -> ip-check (reason 'none', managed-type: 'full')
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Started Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░
░░ The job identifier is 403.
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.1875] device (lo): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.1877] device (eth0): state change: ip-check -> secondaries (reason 'none', managed-type: 'full')
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.1879] device (lo): state change: secondaries -> activated (reason 'none', managed-type: 'external')
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.1884] device (lo): Activation: successful, device activated.
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.1888] device (eth0): state change: secondaries -> activated (reason 'none', managed-type: 'full') Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.1890] manager: NetworkManager state is now CONNECTED_SITE Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.1892] device (eth0): Activation: successful, device activated. Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.1898] manager: NetworkManager state is now CONNECTED_GLOBAL Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com NetworkManager[629]: [1728453364.1900] manager: startup complete Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Finished Network Manager Wait Online. ░░ Subject: A start job for unit NetworkManager-wait-online.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-wait-online.service has finished successfully. ░░ ░░ The job identifier is 193. Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Starting Initial cloud-init job (metadata service crawler)... ░░ Subject: A start job for unit cloud-init.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.service has begun execution. ░░ ░░ The job identifier is 223. Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Started GSSAPI Proxy Daemon. ░░ Subject: A start job for unit gssproxy.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit gssproxy.service has finished successfully. ░░ ░░ The job identifier is 238. Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: RPC security service for NFS client and server was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab). ░░ Subject: A start job for unit rpc-gssd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-gssd.service has finished successfully. ░░ ░░ The job identifier is 239. Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Reached target NFS client services. ░░ Subject: A start job for unit nfs-client.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nfs-client.target has finished successfully. ░░ ░░ The job identifier is 236. Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Reached target Preparation for Remote File Systems. ░░ Subject: A start job for unit remote-fs-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs-pre.target has finished successfully. ░░ ░░ The job identifier is 243. Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Reached target Remote File Systems. ░░ Subject: A start job for unit remote-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs.target has finished successfully. ░░ ░░ The job identifier is 235. 
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: TPM2 PCR Barrier (User) was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f).
░░ Subject: A start job for unit systemd-pcrphase.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit systemd-pcrphase.service has finished successfully.
░░
░░ The job identifier is 139.
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com chronyd[601]: Added source 10.11.160.238
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com chronyd[601]: Added source 10.18.100.10
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com chronyd[601]: Added source 10.2.32.37
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com chronyd[601]: Added source 10.2.32.38
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: Cloud-init v. 23.4-19.el9 running 'init' at Wed, 09 Oct 2024 05:56:04 +0000. Up 41.48 seconds.
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: ++++++++++++++++++++++++++++++++++++++Net device info+++++++++++++++++++++++++++++++++++++++
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: | Device | Up | Address | Mask | Scope | Hw-Address |
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: | eth0 | True | 10.31.43.227 | 255.255.252.0 | global | 0e:b5:d4:56:f4:53 |
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: | eth0 | True | fe80::cb5:d4ff:fe56:f453/64 | . | link | 0e:b5:d4:56:f4:53 |
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: | lo | True | 127.0.0.1 | 255.0.0.0 | host | . |
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: | lo | True | ::1/128 | . | host | . |
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: ++++++++++++++++++++++++++++Route IPv4 info+++++++++++++++++++++++++++++
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: | Route | Destination | Gateway | Genmask | Interface | Flags |
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: | 0 | 0.0.0.0 | 10.31.40.1 | 0.0.0.0 | eth0 | UG |
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: | 1 | 10.31.40.0 | 0.0.0.0 | 255.255.252.0 | eth0 | U |
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: +++++++++++++++++++Route IPv6 info+++++++++++++++++++
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: +-------+-------------+---------+-----------+-------+
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: | Route | Destination | Gateway | Interface | Flags |
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: +-------+-------------+---------+-----------+-------+
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: | 1 | fe80::/64 | :: | eth0 | U |
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: | 3 | multicast | :: | eth0 | U |
Oct 09 01:56:04 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: ci-info: +-------+-------------+---------+-----------+-------+
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: Generating public/private rsa key pair.
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: Your identification has been saved in /etc/ssh/ssh_host_rsa_key
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: Your public key has been saved in /etc/ssh/ssh_host_rsa_key.pub
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: The key fingerprint is:
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: SHA256:QV3W4smZ5vXAGhw9REXgX5Io5R8eaDBJnmbZuxWqAv8 root@ip-10-31-43-227.us-east-1.aws.redhat.com
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: The key's randomart image is:
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: +---[RSA 3072]----+
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | .o+o+=++o|
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | . .oXo++. |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | . B+*BB..|
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | + oX=+*.|
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | . S oo++o.|
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | o .oo .|
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | o . . |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | o |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | E |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: +----[SHA256]-----+
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: Generating public/private dsa key pair.
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: Your identification has been saved in /etc/ssh/ssh_host_dsa_key
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: Your public key has been saved in /etc/ssh/ssh_host_dsa_key.pub
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: The key fingerprint is:
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: SHA256:Nk4P7dpnQRFYU6a84HF/Oe45E0Xv1AZAIlToelTgY1w root@ip-10-31-43-227.us-east-1.aws.redhat.com
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: The key's randomart image is:
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: +---[DSA 1024]----+
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | .++E+*+o |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | o.ooo.+. .|
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | .=.o +. oo|
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | .o+ +.o B|
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | oS o.. .*o|
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | .+.= . .oo|
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | .. o . ..|
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | o o .o.|
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | . .o oo|
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: +----[SHA256]-----+
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: Generating public/private ecdsa key pair.
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: Your identification has been saved in /etc/ssh/ssh_host_ecdsa_key
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: Your public key has been saved in /etc/ssh/ssh_host_ecdsa_key.pub
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: The key fingerprint is:
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: SHA256:g+U9DLTuEmkl4BwztJpPQF57poe0uaLNv+mWgjUy6Nk root@ip-10-31-43-227.us-east-1.aws.redhat.com
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: The key's randomart image is:
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: +---[ECDSA 256]---+
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | ..B . |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | o + B . . |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | o * + = |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | = B O + |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: |. o * * S + |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: |+ oo + o . . |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: |.=+.o.. . |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: |.*.Eo. . |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: |. o==. |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: +----[SHA256]-----+
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: Generating public/private ed25519 key pair.
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: Your identification has been saved in /etc/ssh/ssh_host_ed25519_key
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: Your public key has been saved in /etc/ssh/ssh_host_ed25519_key.pub
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: The key fingerprint is:
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: SHA256:QjvQAtFc3k8gW5wRgZVNJMbsERTBE8SQM1Zs0s3L1EM root@ip-10-31-43-227.us-east-1.aws.redhat.com
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: The key's randomart image is:
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: +--[ED25519 256]--+
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | o+ .+^^^ooE |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | .ooB*#o= o |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | o.=*.=.. . |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | + ..oo |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | + S . |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | o |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: | |
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[720]: +----[SHA256]-----+
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Finished Initial cloud-init job (metadata service crawler).
░░ Subject: A start job for unit cloud-init.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit cloud-init.service has finished successfully.
░░
░░ The job identifier is 223.
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Reached target Cloud-config availability.
░░ Subject: A start job for unit cloud-config.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit cloud-config.target has finished successfully.
░░
░░ The job identifier is 221.
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Reached target Network is Online.
░░ Subject: A start job for unit network-online.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit network-online.target has finished successfully.
░░
░░ The job identifier is 192.
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Starting Apply the settings specified in cloud-config...
░░ Subject: A start job for unit cloud-config.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit cloud-config.service has begun execution.
░░
░░ The job identifier is 220.
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Starting Crash recovery kernel arming... ░░ Subject: A start job for unit kdump.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kdump.service has begun execution. ░░ ░░ The job identifier is 205. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Starting The restraint harness.... ░░ Subject: A start job for unit restraintd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit restraintd.service has begun execution. ░░ ░░ The job identifier is 233. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Starting Notify NFS peers of a restart... ░░ Subject: A start job for unit rpc-statd-notify.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-statd-notify.service has begun execution. ░░ ░░ The job identifier is 244. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Starting System Logging Service... ░░ Subject: A start job for unit rsyslog.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rsyslog.service has begun execution. ░░ ░░ The job identifier is 234. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Starting OpenSSH server daemon... ░░ Subject: A start job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 199. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Started The restraint harness.. ░░ Subject: A start job for unit restraintd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit restraintd.service has finished successfully. ░░ ░░ The job identifier is 233. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com sshd[799]: Server listening on 0.0.0.0 port 22. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com sshd[799]: Server listening on :: port 22. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Started OpenSSH server daemon. ░░ Subject: A start job for unit sshd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has finished successfully. ░░ ░░ The job identifier is 199. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com sm-notify[797]: Version 2.5.4 starting Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Started Notify NFS peers of a restart. ░░ Subject: A start job for unit rpc-statd-notify.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-statd-notify.service has finished successfully. ░░ ░░ The job identifier is 244. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[829]: Cloud-init v. 23.4-19.el9 running 'modules:config' at Wed, 09 Oct 2024 05:56:06 +0000. Up 43.70 seconds. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com sshd[799]: Received signal 15; terminating. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Stopping OpenSSH server daemon... 
░░ Subject: A stop job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 481. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: sshd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit sshd.service has successfully entered the 'dead' state. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Stopped OpenSSH server daemon. ░░ Subject: A stop job for unit sshd.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd.service has finished. ░░ ░░ The job identifier is 481 and the job result is done. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Stopped target sshd-keygen.target. ░░ Subject: A stop job for unit sshd-keygen.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd-keygen.target has finished. ░░ ░░ The job identifier is 549 and the job result is done. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Stopping sshd-keygen.target... ░░ Subject: A stop job for unit sshd-keygen.target has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd-keygen.target has begun execution. ░░ ░░ The job identifier is 549. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully. ░░ ░░ The job identifier is 545. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ed25519.service has finished successfully. ░░ ░░ The job identifier is 548. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@rsa.service has finished successfully. ░░ ░░ The job identifier is 547. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Reached target sshd-keygen.target. ░░ Subject: A start job for unit sshd-keygen.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen.target has finished successfully. ░░ ░░ The job identifier is 549. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Starting OpenSSH server daemon... 
░░ Subject: A start job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 481. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com sshd[836]: Server listening on 0.0.0.0 port 22. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com sshd[836]: Server listening on :: port 22. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Started OpenSSH server daemon. ░░ Subject: A start job for unit sshd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has finished successfully. ░░ ░░ The job identifier is 481. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Finished Apply the settings specified in cloud-config. ░░ Subject: A start job for unit cloud-config.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.service has finished successfully. ░░ ░░ The job identifier is 220. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com rsyslogd[798]: [origin software="rsyslogd" swVersion="8.2310.0-4.el9" x-pid="798" x-info="https://www.rsyslog.com"] start Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Starting Execute cloud user/final scripts... ░░ Subject: A start job for unit cloud-final.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-final.service has begun execution. ░░ ░░ The job identifier is 224. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Starting Permit User Sessions... ░░ Subject: A start job for unit systemd-user-sessions.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-user-sessions.service has begun execution. ░░ ░░ The job identifier is 248. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Started System Logging Service. ░░ Subject: A start job for unit rsyslog.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rsyslog.service has finished successfully. ░░ ░░ The job identifier is 234. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Finished Permit User Sessions. ░░ Subject: A start job for unit systemd-user-sessions.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-user-sessions.service has finished successfully. ░░ ░░ The job identifier is 248. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Started Command Scheduler. ░░ Subject: A start job for unit crond.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit crond.service has finished successfully. ░░ ░░ The job identifier is 216. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Started Getty on tty1. ░░ Subject: A start job for unit getty@tty1.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit getty@tty1.service has finished successfully. ░░ ░░ The job identifier is 231. 
Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Started Serial Getty on ttyS0. ░░ Subject: A start job for unit serial-getty@ttyS0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit serial-getty@ttyS0.service has finished successfully. ░░ ░░ The job identifier is 226. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Reached target Login Prompts. ░░ Subject: A start job for unit getty.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit getty.target has finished successfully. ░░ ░░ The job identifier is 225. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Reached target Multi-User System. ░░ Subject: A start job for unit multi-user.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit multi-user.target has finished successfully. ░░ ░░ The job identifier is 116. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Starting Record Runlevel Change in UTMP... ░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp-runlevel.service has begun execution. ░░ ░░ The job identifier is 213. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: systemd-update-utmp-runlevel.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-update-utmp-runlevel.service has successfully entered the 'dead' state. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Finished Record Runlevel Change in UTMP. ░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp-runlevel.service has finished successfully. ░░ ░░ The job identifier is 213. Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com crond[859]: (CRON) STARTUP (1.5.7) Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com crond[859]: (CRON) INFO (Syslog will be used instead of sendmail.) Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com crond[859]: (CRON) INFO (RANDOM_DELAY will be scaled with factor 68% if used.) Oct 09 01:56:06 ip-10-31-43-227.us-east-1.aws.redhat.com crond[859]: (CRON) INFO (running with inotify support) Oct 09 01:56:07 ip-10-31-43-227.us-east-1.aws.redhat.com rsyslogd[798]: imjournal: journal files changed, reloading... [v8.2310.0-4.el9 try https://www.rsyslog.com/e/0 ] Oct 09 01:56:07 ip-10-31-43-227.us-east-1.aws.redhat.com restraintd[801]: Listening on http://localhost:8081 Oct 09 01:56:07 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[866]: Cloud-init v. 23.4-19.el9 running 'modules:final' at Wed, 09 Oct 2024 05:56:07 +0000. Up 44.20 seconds. 
Oct 09 01:56:07 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[868]: ############################################################# Oct 09 01:56:07 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[869]: -----BEGIN SSH HOST KEY FINGERPRINTS----- Oct 09 01:56:07 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[871]: 1024 SHA256:Nk4P7dpnQRFYU6a84HF/Oe45E0Xv1AZAIlToelTgY1w root@ip-10-31-43-227.us-east-1.aws.redhat.com (DSA) Oct 09 01:56:07 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[873]: 256 SHA256:g+U9DLTuEmkl4BwztJpPQF57poe0uaLNv+mWgjUy6Nk root@ip-10-31-43-227.us-east-1.aws.redhat.com (ECDSA) Oct 09 01:56:07 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[875]: 256 SHA256:QjvQAtFc3k8gW5wRgZVNJMbsERTBE8SQM1Zs0s3L1EM root@ip-10-31-43-227.us-east-1.aws.redhat.com (ED25519) Oct 09 01:56:07 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[877]: 3072 SHA256:QV3W4smZ5vXAGhw9REXgX5Io5R8eaDBJnmbZuxWqAv8 root@ip-10-31-43-227.us-east-1.aws.redhat.com (RSA) Oct 09 01:56:07 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[878]: -----END SSH HOST KEY FINGERPRINTS----- Oct 09 01:56:07 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[879]: ############################################################# Oct 09 01:56:07 ip-10-31-43-227.us-east-1.aws.redhat.com cloud-init[866]: Cloud-init v. 23.4-19.el9 finished at Wed, 09 Oct 2024 05:56:07 +0000. Datasource DataSourceEc2Local. Up 44.43 seconds Oct 09 01:56:07 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Finished Execute cloud user/final scripts. ░░ Subject: A start job for unit cloud-final.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-final.service has finished successfully. ░░ ░░ The job identifier is 224. Oct 09 01:56:07 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Reached target Cloud-init target. ░░ Subject: A start job for unit cloud-init.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.target has finished successfully. ░░ ░░ The job identifier is 219. Oct 09 01:56:07 ip-10-31-43-227.us-east-1.aws.redhat.com kdumpctl[804]: kdump: Detected change(s) in the following file(s): /etc/fstab Oct 09 01:56:10 ip-10-31-43-227.us-east-1.aws.redhat.com chronyd[601]: Selected source 10.2.32.38 Oct 09 01:56:10 ip-10-31-43-227.us-east-1.aws.redhat.com chronyd[601]: System clock TAI offset set to 37 seconds Oct 09 01:56:13 ip-10-31-43-227.us-east-1.aws.redhat.com kernel: block xvda: the capability attribute has been deprecated. Oct 09 01:56:14 ip-10-31-43-227.us-east-1.aws.redhat.com kdumpctl[804]: kdump: Rebuilding /boot/initramfs-5.14.0-513.el9.x86_64kdump.img Oct 09 01:56:14 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
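kdump noticed the change in /etc/fstab and begins rebuilding its initramfs below. A sketch of how to check and trigger the same rebuild manually, assuming the kexec-tools/kdumpctl tooling seen in this log:

    # Report whether the crash kernel is currently loaded
    kdumpctl status
    # Re-run the config check and the dracut rebuild shown next in the journal
    kdumpctl restart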
Oct 09 01:56:15 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1424]: dracut-057-70.git20240819.el9 Oct 09 01:56:15 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: Executing: /usr/bin/dracut --add kdumpbase --quiet --hostonly --hostonly-cmdline --hostonly-i18n --hostonly-mode strict --hostonly-nics -o "plymouth resume ifcfg earlykdump" --mount "/dev/disk/by-uuid/efa2924f-8850-491c-a9bf-997f8385d98d /sysroot xfs rw,relatime,seclabel,attr2,inode64,logbufs=8,logbsize=32k,noquota" --squash-compressor zstd --no-hostonly-default-device -f /boot/initramfs-5.14.0-513.el9.x86_64kdump.img 5.14.0-513.el9.x86_64 Oct 09 01:56:17 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'systemd-networkd' will not be installed, because command 'networkctl' could not be found! Oct 09 01:56:17 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd' could not be found! Oct 09 01:56:17 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd-wait-online' could not be found! Oct 09 01:56:17 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'systemd-resolved' will not be installed, because command 'resolvectl' could not be found! Oct 09 01:56:17 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! Oct 09 01:56:17 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! Oct 09 01:56:17 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! Oct 09 01:56:17 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'busybox' will not be installed, because command 'busybox' could not be found! Oct 09 01:56:17 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! Oct 09 01:56:17 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'connman' will not be installed, because command 'connmand' could not be found! Oct 09 01:56:17 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'connman' will not be installed, because command 'connmanctl' could not be found! Oct 09 01:56:17 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! Oct 09 01:56:17 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'network-wicked' will not be installed, because command 'wicked' could not be found! Oct 09 01:56:17 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: Module 'ifcfg' will not be installed, because it's in the list to be omitted! Oct 09 01:56:17 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: Module 'plymouth' will not be installed, because it's in the list to be omitted! Oct 09 01:56:17 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! 
Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'lvmmerge' will not be installed, because command 'lvm' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'lvmthinpool-monitor' will not be installed, because command 'lvm' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'btrfs' will not be installed, because command 'btrfs' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'dmraid' will not be installed, because command 'dmraid' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'lvm' will not be installed, because command 'lvm' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'mdraid' will not be installed, because command 'mdadm' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'pcsc' will not be installed, because command 'pcscd' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'tpm2-tss' will not be installed, because command 'tpm2' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'iscsi' will not be installed, because command 'iscsid' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'nvmf' will not be installed, because command 'nvme' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: Module 'resume' will not be installed, because it's in the list to be omitted! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'biosdevname' will not be installed, because command 'biosdevname' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: Module 'earlykdump' will not be installed, because it's in the list to be omitted! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'memstrack' will not be installed, because command 'memstrack' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: memstrack is not available Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: If you need to use rd.memdebug>=4, please install memstrack and procps-ng Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'systemd-resolved' will not be installed, because command 'resolvectl' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! 
Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'busybox' will not be installed, because command 'busybox' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'connman' will not be installed, because command 'connmand' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'connman' will not be installed, because command 'connmanctl' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'network-wicked' will not be installed, because command 'wicked' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'lvmmerge' will not be installed, because command 'lvm' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'lvmthinpool-monitor' will not be installed, because command 'lvm' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'btrfs' will not be installed, because command 'btrfs' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'dmraid' will not be installed, because command 'dmraid' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'lvm' will not be installed, because command 'lvm' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'mdraid' will not be installed, because command 'mdadm' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'pcsc' will not be installed, because command 'pcscd' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'tpm2-tss' will not be installed, because command 'tpm2' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'iscsi' will not be installed, because command 'iscsid' could not be found! 
Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'nvmf' will not be installed, because command 'nvme' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: dracut module 'memstrack' will not be installed, because command 'memstrack' could not be found! Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: memstrack is not available Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: If you need to use rd.memdebug>=4, please install memstrack and procps-ng Oct 09 01:56:18 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: systemd *** Oct 09 01:56:19 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: systemd-initrd *** Oct 09 01:56:19 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: nss-softokn *** Oct 09 01:56:19 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: rngd *** Oct 09 01:56:19 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: i18n *** Oct 09 01:56:19 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: drm *** Oct 09 01:56:19 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: prefixdevname *** Oct 09 01:56:19 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: kernel-modules *** Oct 09 01:56:20 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: kernel-modules-extra *** Oct 09 01:56:20 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: kernel-modules-extra: configuration source "/run/depmod.d" does not exist Oct 09 01:56:20 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: kernel-modules-extra: configuration source "/lib/depmod.d" does not exist Oct 09 01:56:20 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: kernel-modules-extra: parsing configuration file "/etc/depmod.d/dist.conf" Oct 09 01:56:20 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: kernel-modules-extra: /etc/depmod.d/dist.conf: added "updates extra built-in weak-updates" to the list of search directories Oct 09 01:56:20 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: fstab-sys *** Oct 09 01:56:20 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: rootfs-block *** Oct 09 01:56:20 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: terminfo *** Oct 09 01:56:20 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: udev-rules *** Oct 09 01:56:20 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: Skipping udev rule: 91-permissions.rules Oct 09 01:56:20 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: Skipping udev rule: 80-drivers-modprobe.rules Oct 09 01:56:20 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: dracut-systemd *** Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: usrmount *** Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: base *** Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: fs-lib *** Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: kdumpbase *** Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: microcode_ctl-fw_dir_override *** Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com 
dracut[1426]: microcode_ctl module: mangling fw_dir Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: microcode_ctl: reset fw_dir to "/lib/firmware/updates /lib/firmware" Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel"... Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: microcode_ctl: intel: caveats check for kernel version "5.14.0-513.el9.x86_64" passed, adding "/usr/share/microcode_ctl/ucode_with_caveats/intel" to fw_dir variable Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-2d-07"... Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: microcode_ctl: configuration "intel-06-2d-07" is ignored Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4e-03"... Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: microcode_ctl: configuration "intel-06-4e-03" is ignored Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4f-01"... Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: microcode_ctl: configuration "intel-06-4f-01" is ignored Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-55-04"... Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: microcode_ctl: configuration "intel-06-55-04" is ignored Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-5e-03"... Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: microcode_ctl: configuration "intel-06-5e-03" is ignored Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8c-01"... Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: microcode_ctl: configuration "intel-06-8c-01" is ignored Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8e-9e-0x-0xca"... Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: microcode_ctl: configuration "intel-06-8e-9e-0x-0xca" is ignored Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8e-9e-0x-dell"... 
Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: microcode_ctl: configuration "intel-06-8e-9e-0x-dell" is ignored Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: microcode_ctl: final fw_dir: "/usr/share/microcode_ctl/ucode_with_caveats/intel /lib/firmware/updates /lib/firmware" Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: shutdown *** Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including module: squash *** Oct 09 01:56:21 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Including modules done *** Oct 09 01:56:22 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Installing kernel module dependencies *** Oct 09 01:56:22 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Installing kernel module dependencies done *** Oct 09 01:56:22 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Resolving executable dependencies *** Oct 09 01:56:24 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Resolving executable dependencies done *** Oct 09 01:56:24 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Hardlinking files *** Oct 09 01:56:24 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: Mode: real Oct 09 01:56:24 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: Files: 433 Oct 09 01:56:24 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: Linked: 1 files Oct 09 01:56:24 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: Compared: 0 xattrs Oct 09 01:56:24 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: Compared: 8 files Oct 09 01:56:24 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: Saved: 56.15 KiB Oct 09 01:56:24 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: Duration: 0.006718 seconds Oct 09 01:56:24 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Hardlinking files done *** Oct 09 01:56:24 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Generating early-microcode cpio image *** Oct 09 01:56:24 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Constructing GenuineIntel.bin *** Oct 09 01:56:24 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Constructing GenuineIntel.bin *** Oct 09 01:56:24 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Store current command line parameters *** Oct 09 01:56:24 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: Stored kernel commandline: Oct 09 01:56:24 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: No dracut internal kernel commandline stored in the initramfs Oct 09 01:56:24 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Install squash loader *** Oct 09 01:56:25 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Stripping files *** Oct 09 01:56:26 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Stripping files done *** Oct 09 01:56:26 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Squashing the files inside the initramfs *** Oct 09 01:56:31 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Squashing the files inside the initramfs done *** Oct 09 01:56:31 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Creating image file '/boot/initramfs-5.14.0-513.el9.x86_64kdump.img' *** Oct 09 01:56:32 ip-10-31-43-227.us-east-1.aws.redhat.com dracut[1426]: *** Creating initramfs image file '/boot/initramfs-5.14.0-513.el9.x86_64kdump.img' done *** Oct 09 01:56:32 ip-10-31-43-227.us-east-1.aws.redhat.com kdumpctl[804]: kdump: kexec: loaded kdump 
kernel Oct 09 01:56:32 ip-10-31-43-227.us-east-1.aws.redhat.com kdumpctl[804]: kdump: Starting kdump: [OK] Oct 09 01:56:32 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Finished Crash recovery kernel arming. ░░ Subject: A start job for unit kdump.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kdump.service has finished successfully. ░░ ░░ The job identifier is 205. Oct 09 01:56:32 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Startup finished in 1.082s (kernel) + 11.406s (initrd) + 57.140s (userspace) = 1min 9.629s. ░░ Subject: System start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ All system services necessary queued for starting at boot have been ░░ started. Note that this does not mean that the machine is now idle as services ░░ might still be busy with completing start-up. ░░ ░░ Kernel start-up required 1082872 microseconds. ░░ ░░ Initrd start-up required 11406236 microseconds. ░░ ░░ Userspace start-up required 57140004 microseconds. Oct 09 01:56:33 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. Oct 09 01:57:16 ip-10-31-43-227.us-east-1.aws.redhat.com chronyd[601]: Selected source 100.34.187.168 (2.centos.pool.ntp.org) Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com sshd[4057]: Accepted publickey for root from 10.30.33.101 port 51576 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Created slice User Slice of UID 0. ░░ Subject: A start job for unit user-0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-0.slice has finished successfully. ░░ ░░ The job identifier is 552. Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Starting User Runtime Directory /run/user/0... ░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 551. Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd-logind[584]: New session 1 of user root. ░░ Subject: A new session 1 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 1 has been created for the user root. ░░ ░░ The leading process of the session is 4057. Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Finished User Runtime Directory /run/user/0. ░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has finished successfully. ░░ ░░ The job identifier is 551. Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Starting User Manager for UID 0... ░░ Subject: A start job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 550. 
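With the kdump initramfs written and the crash kernel loaded, the image can be inspected with lsinitrd from the dracut package, and the "Startup finished" breakdown above is the same data systemd-analyze reports. A minimal sketch of both checks on the booted host:

    # List the leading entries of the freshly built kdump initramfs
    lsinitrd /boot/initramfs-5.14.0-513.el9.x86_64kdump.img | head -n 40
    # Overall kernel/initrd/userspace boot-time breakdown, as in the journal summary
    systemd-analyze
    # Attribute the userspace portion to individual units
    systemd-analyze blame | head -n 15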
Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[4061]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0) Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[4061]: Queued start job for default target Main User Target. Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[4061]: Created slice User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[4061]: Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system). ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[4061]: Started Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[4061]: Reached target Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[4061]: Reached target Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[4061]: Starting D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 4. Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[4061]: Starting Create User's Volatile Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 12. Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[4061]: Finished Create User's Volatile Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[4061]: Listening on D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[4061]: Reached target Sockets. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[4061]: Reached target Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[4061]: Reached target Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[4061]: Startup finished in 206ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 0 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 206845 microseconds. Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Started User Manager for UID 0. ░░ Subject: A start job for unit user@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has finished successfully. ░░ ░░ The job identifier is 550. Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Started Session 1 of User root. ░░ Subject: A start job for unit session-1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-1.scope has finished successfully. ░░ ░░ The job identifier is 616. Oct 09 01:58:02 ip-10-31-43-227.us-east-1.aws.redhat.com sshd[4057]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Oct 09 01:58:03 ip-10-31-43-227.us-east-1.aws.redhat.com sshd[4070]: Received disconnect from 10.30.33.101 port 51576:11: disconnected by user Oct 09 01:58:03 ip-10-31-43-227.us-east-1.aws.redhat.com sshd[4070]: Disconnected from user root 10.30.33.101 port 51576 Oct 09 01:58:03 ip-10-31-43-227.us-east-1.aws.redhat.com sshd[4057]: pam_unix(sshd:session): session closed for user root Oct 09 01:58:03 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: session-1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-1.scope has successfully entered the 'dead' state. Oct 09 01:58:03 ip-10-31-43-227.us-east-1.aws.redhat.com systemd-logind[584]: Session 1 logged out. Waiting for processes to exit. Oct 09 01:58:03 ip-10-31-43-227.us-east-1.aws.redhat.com systemd-logind[584]: Removed session 1. ░░ Subject: Session 1 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 1 has been terminated. 
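The create/log-out/remove cycle of session 1 above can be observed live with loginctl; a short sketch, assuming systemd-logind as shown in the log:

    # Enumerate active logind sessions
    loginctl list-sessions
    # Show the state of root's sessions and user manager (user@0.service)
    loginctl user-status root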
Oct 09 01:58:05 ip-10-31-43-227.us-east-1.aws.redhat.com sshd[4099]: Accepted publickey for root from 10.31.10.32 port 46532 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Oct 09 01:58:05 ip-10-31-43-227.us-east-1.aws.redhat.com sshd[4100]: Accepted publickey for root from 10.31.10.32 port 46544 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Oct 09 01:58:05 ip-10-31-43-227.us-east-1.aws.redhat.com systemd-logind[584]: New session 3 of user root. ░░ Subject: A new session 3 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 3 has been created for the user root. ░░ ░░ The leading process of the session is 4099. Oct 09 01:58:05 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Started Session 3 of User root. ░░ Subject: A start job for unit session-3.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-3.scope has finished successfully. ░░ ░░ The job identifier is 683. Oct 09 01:58:05 ip-10-31-43-227.us-east-1.aws.redhat.com systemd-logind[584]: New session 4 of user root. ░░ Subject: A new session 4 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 4 has been created for the user root. ░░ ░░ The leading process of the session is 4100. Oct 09 01:58:05 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Started Session 4 of User root. ░░ Subject: A start job for unit session-4.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-4.scope has finished successfully. ░░ ░░ The job identifier is 750. Oct 09 01:58:05 ip-10-31-43-227.us-east-1.aws.redhat.com sshd[4099]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Oct 09 01:58:05 ip-10-31-43-227.us-east-1.aws.redhat.com sshd[4100]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Oct 09 01:58:05 ip-10-31-43-227.us-east-1.aws.redhat.com sshd[4106]: Received disconnect from 10.31.10.32 port 46544:11: disconnected by user Oct 09 01:58:05 ip-10-31-43-227.us-east-1.aws.redhat.com sshd[4106]: Disconnected from user root 10.31.10.32 port 46544 Oct 09 01:58:05 ip-10-31-43-227.us-east-1.aws.redhat.com sshd[4100]: pam_unix(sshd:session): session closed for user root Oct 09 01:58:05 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: session-4.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-4.scope has successfully entered the 'dead' state. Oct 09 01:58:05 ip-10-31-43-227.us-east-1.aws.redhat.com systemd-logind[584]: Session 4 logged out. Waiting for processes to exit. Oct 09 01:58:05 ip-10-31-43-227.us-east-1.aws.redhat.com systemd-logind[584]: Removed session 4. ░░ Subject: Session 4 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 4 has been terminated. Oct 09 02:00:18 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Starting Hostname Service... ░░ Subject: A start job for unit systemd-hostnamed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has begun execution. 
░░ ░░ The job identifier is 818. Oct 09 02:00:18 ip-10-31-43-227.us-east-1.aws.redhat.com systemd[1]: Started Hostname Service. ░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has finished successfully. ░░ ░░ The job identifier is 818. Oct 09 02:00:18 managed-node3 systemd-hostnamed[5464]: Hostname set to <managed-node3> (static) Oct 09 02:00:18 managed-node3 NetworkManager[629]: <info>  [1728453618.4188] hostname: static hostname changed from "ip-10-31-43-227.us-east-1.aws.redhat.com" to "managed-node3" Oct 09 02:00:18 managed-node3 systemd[1]: Starting Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 882. Oct 09 02:00:18 managed-node3 systemd[1]: Started Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 882. Oct 09 02:00:28 managed-node3 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Oct 09 02:00:48 managed-node3 systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. Oct 09 02:01:01 managed-node3 CROND[6155]: (root) CMD (run-parts /etc/cron.hourly) Oct 09 02:01:01 managed-node3 run-parts[6158]: (/etc/cron.hourly) starting 0anacron Oct 09 02:01:01 managed-node3 anacron[6166]: Anacron started on 2024-10-09 Oct 09 02:01:01 managed-node3 run-parts[6168]: (/etc/cron.hourly) finished 0anacron Oct 09 02:01:01 managed-node3 CROND[6154]: (root) CMDEND (run-parts /etc/cron.hourly) Oct 09 02:01:01 managed-node3 anacron[6166]: Will run job `cron.daily' in 19 min. Oct 09 02:01:01 managed-node3 anacron[6166]: Will run job `cron.weekly' in 39 min. Oct 09 02:01:01 managed-node3 anacron[6166]: Will run job `cron.monthly' in 59 min. Oct 09 02:01:01 managed-node3 anacron[6166]: Jobs will be executed sequentially Oct 09 02:01:47 managed-node3 sshd[6169]: Accepted publickey for root from 10.31.14.104 port 50410 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Oct 09 02:01:47 managed-node3 systemd-logind[584]: New session 5 of user root. ░░ Subject: A new session 5 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 5 has been created for the user root. ░░ ░░ The leading process of the session is 6169. Oct 09 02:01:47 managed-node3 systemd[1]: Started Session 5 of User root. ░░ Subject: A start job for unit session-5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-5.scope has finished successfully. ░░ ░░ The job identifier is 946.
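The rename logged by systemd-hostnamed and NetworkManager above corresponds to a static hostname change; a sketch of making and checking the same change by hand:

    # Set the static hostname, as systemd-hostnamed recorded
    hostnamectl set-hostname managed-node3
    # Confirm the static hostname now in effect
    hostnamectl status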
Oct 09 02:01:47 managed-node3 sshd[6169]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Oct 09 02:01:49 managed-node3 python3.9[6297]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Oct 09 02:01:50 managed-node3 python3.9[6430]: ansible-service_facts Invoked Oct 09 02:01:54 managed-node3 python3.9[6621]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Oct 09 02:01:54 managed-node3 python3.9[6728]: ansible-ansible.legacy.dnf Invoked with name=['pcp', 'pcp-zeroconf'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Oct 09 02:02:17 managed-node3 kernel: SELinux: Converting 376 SID table entries... Oct 09 02:02:17 managed-node3 kernel: SELinux: policy capability network_peer_controls=1 Oct 09 02:02:17 managed-node3 kernel: SELinux: policy capability open_perms=1 Oct 09 02:02:17 managed-node3 kernel: SELinux: policy capability extended_socket_class=1 Oct 09 02:02:17 managed-node3 kernel: SELinux: policy capability always_check_network=0 Oct 09 02:02:17 managed-node3 kernel: SELinux: policy capability cgroup_seclabel=1 Oct 09 02:02:17 managed-node3 kernel: SELinux: policy capability nnp_nosuid_transition=1 Oct 09 02:02:17 managed-node3 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Oct 09 02:02:20 managed-node3 dbus-broker-launch[578]: avc: op=load_policy lsm=selinux seqno=2 res=1 Oct 09 02:02:21 managed-node3 systemd[1]: Starting PCP Reboot Initialization Helper Service... ░░ Subject: A start job for unit pcp-reboot-init.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pcp-reboot-init.service has begun execution. ░░ ░░ The job identifier is 1013. Oct 09 02:02:21 managed-node3 systemd[1]: Starting Performance Metrics Collector Daemon... ░░ Subject: A start job for unit pmcd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has begun execution. ░░ ░░ The job identifier is 1016. Oct 09 02:02:21 managed-node3 systemd[1]: Finished PCP Reboot Initialization Helper Service. ░░ Subject: A start job for unit pcp-reboot-init.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pcp-reboot-init.service has finished successfully. ░░ ░░ The job identifier is 1013. Oct 09 02:02:21 managed-node3 systemd[1]: Started Performance Metrics Collector Daemon. ░░ Subject: A start job for unit pmcd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has finished successfully. ░░ ░░ The job identifier is 1016. Oct 09 02:02:21 managed-node3 systemd[1]: Starting Performance Metrics Inference Engine... 
░░ Subject: A start job for unit pmie.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie.service has begun execution. ░░ ░░ The job identifier is 1147. Oct 09 02:02:21 managed-node3 systemd[1]: Starting Performance Metrics Archive Logger... ░░ Subject: A start job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 1079. Oct 09 02:02:21 managed-node3 pmcd[7628]: Installing dm PMDA ... Oct 09 02:02:21 managed-node3 rc[7485]: /etc/pcp/pmie/rc: Warning: Performance Co-Pilot Inference Engine (pmie) not permanently enabled. Oct 09 02:02:21 managed-node3 rc[7485]: To enable pmie, run the following as root: Oct 09 02:02:21 managed-node3 rc[7485]: # /usr/bin/systemctl enable pmie.service Oct 09 02:02:21 managed-node3 rc[7488]: /etc/pcp/pmlogger/rc: Warning: Performance Co-Pilot archive logger(s) not permanently enabled. Oct 09 02:02:21 managed-node3 rc[7488]: To enable pmlogger, run the following as root: Oct 09 02:02:21 managed-node3 rc[7488]: # /usr/bin/systemctl enable pmlogger.service Oct 09 02:02:21 managed-node3 systemd[1]: Started Performance Metrics Inference Engine. ░░ Subject: A start job for unit pmie.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie.service has finished successfully. ░░ ░░ The job identifier is 1147. Oct 09 02:02:21 managed-node3 systemd[1]: Started Half-hourly check of PMIE instances. ░░ Subject: A start job for unit pmie_check.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_check.timer has finished successfully. ░░ ░░ The job identifier is 1213. Oct 09 02:02:21 managed-node3 systemd[1]: Started Daily processing of PMIE logs. ░░ Subject: A start job for unit pmie_daily.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_daily.timer has finished successfully. ░░ ░░ The job identifier is 1212. Oct 09 02:02:21 managed-node3 systemd[1]: Starting pmie farm service... ░░ Subject: A start job for unit pmie_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm.service has begun execution. ░░ ░░ The job identifier is 1148. Oct 09 02:02:21 managed-node3 systemd[1]: Started pmie farm service. ░░ Subject: A start job for unit pmie_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm.service has finished successfully. ░░ ░░ The job identifier is 1148. Oct 09 02:02:21 managed-node3 systemd[1]: Started Half-hourly check of pmie farm instances. ░░ Subject: A start job for unit pmie_farm_check.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm_check.timer has finished successfully. ░░ ░░ The job identifier is 1149. Oct 09 02:02:21 managed-node3 systemd[1]: Starting Check PMIE instances are running... ░░ Subject: A start job for unit pmie_check.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_check.service has begun execution. ░░ ░░ The job identifier is 1215. 
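Two shell equivalents for what was just logged: the ansible.legacy.dnf task that installed the PCP packages, and the permanent enablement that the pmie/pmlogger rc scripts recommend in their warnings:

    # Equivalent of the ansible dnf task (state=present)
    dnf install -y pcp pcp-zeroconf
    # Apply the fix the rc scripts print, for both services at once
    systemctl enable pmie.service pmlogger.service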
Oct 09 02:02:21 managed-node3 systemd[1]: Starting Check and migrate non-primary pmie farm instances... ░░ Subject: A start job for unit pmie_farm_check.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm_check.service has begun execution. ░░ ░░ The job identifier is 1278. Oct 09 02:02:21 managed-node3 systemd[1]: Started Check PMIE instances are running. ░░ Subject: A start job for unit pmie_check.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_check.service has finished successfully. ░░ ░░ The job identifier is 1215. Oct 09 02:02:21 managed-node3 systemd[1]: Started Check and migrate non-primary pmie farm instances. ░░ Subject: A start job for unit pmie_farm_check.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm_check.service has finished successfully. ░░ ░░ The job identifier is 1278. Oct 09 02:02:22 managed-node3 systemd[1]: pmie_farm_check.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmie_farm_check.service has successfully entered the 'dead' state. Oct 09 02:02:22 managed-node3 systemd[1]: pmie_check.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmie_check.service has successfully entered the 'dead' state. Oct 09 02:02:22 managed-node3 systemd[1]: Started Performance Metrics Archive Logger. ░░ Subject: A start job for unit pmlogger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has finished successfully. ░░ ░░ The job identifier is 1079. Oct 09 02:02:22 managed-node3 systemd[1]: Started Half-hourly check of pmlogger instances. ░░ Subject: A start job for unit pmlogger_check.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_check.timer has finished successfully. ░░ ░░ The job identifier is 1142. Oct 09 02:02:22 managed-node3 systemd[1]: Started Daily processing of archives. ░░ Subject: A start job for unit pmlogger_daily.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_daily.timer has finished successfully. ░░ ░░ The job identifier is 1145. Oct 09 02:02:22 managed-node3 systemd[1]: Starting pmlogger farm service... ░░ Subject: A start job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 1143. Oct 09 02:02:22 managed-node3 systemd[1]: Starting Check pmlogger instances are running... ░░ Subject: A start job for unit pmlogger_check.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_check.service has begun execution. ░░ ░░ The job identifier is 1341. Oct 09 02:02:22 managed-node3 systemd[1]: Started pmlogger farm service. 
░░ Subject: A start job for unit pmlogger_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has finished successfully. ░░ ░░ The job identifier is 1143. Oct 09 02:02:22 managed-node3 systemd[1]: Started Half-hourly check of pmlogger farm instances. ░░ Subject: A start job for unit pmlogger_farm_check.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm_check.timer has finished successfully. ░░ ░░ The job identifier is 1144. Oct 09 02:02:22 managed-node3 systemd[1]: Starting Check and migrate non-primary pmlogger farm instances... ░░ Subject: A start job for unit pmlogger_farm_check.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm_check.service has begun execution. ░░ ░░ The job identifier is 1404. Oct 09 02:02:22 managed-node3 systemd[1]: Started Check and migrate non-primary pmlogger farm instances. ░░ Subject: A start job for unit pmlogger_farm_check.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm_check.service has finished successfully. ░░ ░░ The job identifier is 1404. Oct 09 02:02:22 managed-node3 systemd[1]: Started Check pmlogger instances are running. ░░ Subject: A start job for unit pmlogger_check.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_check.service has finished successfully. ░░ ░░ The job identifier is 1341. Oct 09 02:02:22 managed-node3 systemd[1]: Reloading. Oct 09 02:02:23 managed-node3 systemd-rc-local-generator[8555]: /etc/rc.d/rc.local is not marked executable, skipping. Oct 09 02:02:23 managed-node3 systemd[1]: pmlogger_farm_check.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger_farm_check.service has successfully entered the 'dead' state. Oct 09 02:02:24 managed-node3 systemd[1]: pmlogger_check.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger_check.service has successfully entered the 'dead' state. Oct 09 02:02:24 managed-node3 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-r454a1e53687b448d91133c14616a7f5c.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r454a1e53687b448d91133c14616a7f5c.service has finished successfully. ░░ ░░ The job identifier is 1467. Oct 09 02:02:24 managed-node3 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1530. Oct 09 02:02:24 managed-node3 systemd[1]: Reloading. Oct 09 02:02:24 managed-node3 systemd-rc-local-generator[9517]: /etc/rc.d/rc.local is not marked executable, skipping. 
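The half-hourly and daily pmie/pmlogger timers registered above can be listed once the daemon reload settles; a small sketch using a unit-name pattern:

    # Show all PCP-related timers, including inactive ones
    systemctl list-timers 'pm*' --all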
Oct 09 02:02:24 managed-node3 systemd[1]: Queuing reload/restart jobs for marked units… Oct 09 02:02:26 managed-node3 python3.9[10808]: ansible-ansible.legacy.dnf Invoked with name=['cyrus-sasl-lib', 'cyrus-sasl-scram'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Oct 09 02:02:29 managed-node3 kernel: device-mapper: core: CONFIG_IMA_DISABLE_HTABLE is disabled. Duplicate IMA measurements will not be recorded in the IMA log. Oct 09 02:02:29 managed-node3 kernel: device-mapper: uevent: version 1.0.3 Oct 09 02:02:29 managed-node3 kernel: device-mapper: ioctl: 4.48.0-ioctl (2023-03-01) initialised: dm-devel@redhat.com Oct 09 02:02:29 managed-node3 pmcd[13167]: Installing nfsclient PMDA ... Oct 09 02:02:29 managed-node3 python3.9[13212]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/pcp/pmcd/pmcd.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Oct 09 02:02:30 managed-node3 python3.9[13701]: ansible-file Invoked with path=/etc/pcp/labels state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:30 managed-node3 python3.9[14362]: ansible-file Invoked with path=/etc/pcp/labels/optional state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:31 managed-node3 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Oct 09 02:02:31 managed-node3 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1530. Oct 09 02:02:31 managed-node3 systemd[1]: man-db-cache-update.service: Consumed 2.955s CPU time. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service completed and consumed the indicated resources. Oct 09 02:02:31 managed-node3 systemd[1]: run-r454a1e53687b448d91133c14616a7f5c.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r454a1e53687b448d91133c14616a7f5c.service has successfully entered the 'dead' state. Oct 09 02:02:31 managed-node3 python3.9[14596]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:02:31 managed-node3 python3.9[14761]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1728453751.0176325-7575-207001341105046/.source dest=/etc/pcp/labels/ansible-managed mode=0644 follow=False _original_basename=pmcd.explicit.labels.j2 checksum=5f36b2ea290645ee34d943220a14b54ee5ea5be5 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:32 managed-node3 python3.9[14868]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/optional/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:02:32 managed-node3 python3.9[14953]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1728453751.9360037-7617-102917856650481/.source dest=/etc/pcp/labels/optional/ansible-managed mode=0644 follow=False _original_basename=pmcd.implicit.labels.j2 checksum=5f36b2ea290645ee34d943220a14b54ee5ea5be5 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:32 managed-node3 python3.9[15065]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmcd follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:02:33 managed-node3 pmcd[15078]: Installing openmetrics PMDA ... Oct 09 02:02:33 managed-node3 pmcd[15109]: [Wed Oct 9 02:02:33] pmdaopenmetrics(15109) Info: Initializing ... currently in notready state. Oct 09 02:02:33 managed-node3 pmcd[15109]: [Wed Oct 9 02:02:33] pmdaopenmetrics(15109) Info: Config change detected, traversed 3 config entries in 0.0001s, rescanning ... Oct 09 02:02:33 managed-node3 pmcd[15109]: [Wed Oct 9 02:02:33] pmdaopenmetrics(15109) Info: Found source grafana cluster 1 Oct 09 02:02:33 managed-node3 pmcd[15109]: [Wed Oct 9 02:02:33] pmdaopenmetrics(15109) Info: Found source kepler cluster 2 Oct 09 02:02:33 managed-node3 pmcd[15109]: [Wed Oct 9 02:02:33] pmdaopenmetrics(15109) Info: Found source vllm cluster 3 Oct 09 02:02:33 managed-node3 pmcd[15109]: [Wed Oct 9 02:02:33] pmdaopenmetrics(15109) Info: Ready to process requests Oct 09 02:02:33 managed-node3 pmcd[15116]: [Wed Oct 9 02:02:33] pmdaopenmetrics(15116) Info: Initializing ... currently in notready state. Oct 09 02:02:33 managed-node3 pmcd[15116]: [Wed Oct 9 02:02:33] pmdaopenmetrics(15116) Info: Config change detected, traversed 3 config entries in 0.0001s, rescanning ... 
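The stat-then-copy pairs above are the role deploying the PCP label files. Reconstructed from the logged parameters (dest and mode are taken from the log; the src template names are an assumption based on the _original_basename values), the tasks would look roughly like:

    # Sketch: deploy the explicit and implicit pmcd label templates
    - name: Ensure explicit metric labels are configured
      ansible.builtin.template:
        src: pmcd.explicit.labels.j2        # assumed from _original_basename
        dest: /etc/pcp/labels/ansible-managed
        mode: "0644"

    - name: Ensure implicit metric labels are configured
      ansible.builtin.template:
        src: pmcd.implicit.labels.j2        # assumed from _original_basename
        dest: /etc/pcp/labels/optional/ansible-managed
        mode: "0644"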
Oct 09 02:02:33 managed-node3 pmcd[15116]: [Wed Oct 9 02:02:33] pmdaopenmetrics(15116) Info: Found source grafana cluster 1 Oct 09 02:02:33 managed-node3 pmcd[15116]: [Wed Oct 9 02:02:33] pmdaopenmetrics(15116) Info: Found source kepler cluster 2 Oct 09 02:02:33 managed-node3 pmcd[15116]: [Wed Oct 9 02:02:33] pmdaopenmetrics(15116) Info: Found source vllm cluster 3 Oct 09 02:02:33 managed-node3 pmcd[15116]: [Wed Oct 9 02:02:33] pmdaopenmetrics(15116) Info: Ready to process requests Oct 09 02:02:34 managed-node3 python3.9[15289]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1728453752.6668186-7656-233643925862033/.source dest=/etc/sysconfig/pmcd mode=0644 follow=False _original_basename=pmcd.defaults.j2 checksum=7518789c091387cd9c322e1a8fa8aad21d4efbd3 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:34 managed-node3 python3.9[15396]: ansible-user Invoked with name=metrics system=True state=present non_unique=False force=False remove=False create_home=True move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Oct 09 02:02:35 managed-node3 useradd[15398]: new group: name=metrics, GID=992 Oct 09 02:02:35 managed-node3 useradd[15398]: new user: name=metrics, UID=992, GID=992, home=/home/metrics, shell=/bin/bash, from=/dev/pts/0 Oct 09 02:02:35 managed-node3 python3.9[15607]: ansible-ansible.legacy.command Invoked with _raw_params=set -eu if set -o | grep -q pipefail; then set -o pipefail # pipefail not supported on debian, some ubuntu fi if ! sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^metrics@"; then echo "Creating new metrics user in /etc/pcp/passwd.db" echo "metrics" | saslpasswd2 -a pmcd "metrics" chown root:pcp "/etc/pcp/passwd.db" chmod 640 "/etc/pcp/passwd.db" fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Oct 09 02:02:35 managed-node3 sasldblistusers2[15611]: SASL error opening password file. Have you performed the migration from db2 using cyrusbdb2current? 
Oct 09 02:02:35 managed-node3 sasldblistusers2[15611]: _sasldb_getkeyhandle has failed Oct 09 02:02:36 managed-node3 python3.9[15723]: ansible-ansible.legacy.stat Invoked with path=/etc/sasl2/pmcd.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:02:36 managed-node3 python3.9[15810]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1728453755.7771132-7762-247480844215556/.source.conf dest=/etc/sasl2/pmcd.conf mode=0644 follow=False _original_basename=pmcd.sasl2.conf.j2 checksum=615d2de55ab86108da0c7e6b64988fecb4169771 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:37 managed-node3 python3.9[15922]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Oct 09 02:02:37 managed-node3 systemd[1]: Stopping Performance Metrics Collector Daemon... ░░ Subject: A stop job for unit pmcd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmcd.service has begun execution. ░░ ░░ The job identifier is 1594. Oct 09 02:02:37 managed-node3 pmcd[16011]: pmprobe: Cannot connect to PMCD on host "local:": IPC protocol failure Oct 09 02:02:38 managed-node3 systemd[1]: pmcd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmcd.service has successfully entered the 'dead' state. Oct 09 02:02:38 managed-node3 systemd[1]: Stopped Performance Metrics Collector Daemon. ░░ Subject: A stop job for unit pmcd.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmcd.service has finished. ░░ ░░ The job identifier is 1594 and the job result is done. Oct 09 02:02:38 managed-node3 systemd[1]: pmcd.service: Consumed 4.331s CPU time. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmcd.service completed and consumed the indicated resources. Oct 09 02:02:38 managed-node3 systemd[1]: Starting Performance Metrics Collector Daemon... ░░ Subject: A start job for unit pmcd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has begun execution. ░░ ░░ The job identifier is 1594. Oct 09 02:02:38 managed-node3 systemd[1]: Started Performance Metrics Collector Daemon. ░░ Subject: A start job for unit pmcd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has finished successfully. ░░ ░░ The job identifier is 1594. 
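The _raw_params value logged at 02:02:35 is a multi-line shell script that the journal has flattened onto a single line. Restored to its original layout and wrapped in the shell task that evidently ran it (the task name is illustrative), it reads:

    - name: Ensure the metrics SASL user exists in the pmcd password database
      ansible.builtin.shell: |
        set -eu
        if set -o | grep -q pipefail; then
          set -o pipefail  # pipefail not supported on debian, some ubuntu
        fi
        # Only create the user if no metrics@ entry exists yet
        if ! sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^metrics@"; then
          echo "Creating new metrics user in /etc/pcp/passwd.db"
          echo "metrics" | saslpasswd2 -a pmcd "metrics"
          chown root:pcp "/etc/pcp/passwd.db"
          chmod 640 "/etc/pcp/passwd.db"
        fi

The sasldblistusers2 errors above are consistent with the first-run path: /etc/pcp/passwd.db does not exist yet, so the listing fails, the grep matches nothing, and saslpasswd2 then creates the database with the metrics user.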
Oct 09 02:02:38 managed-node3 python3.9[16533]: ansible-file Invoked with path=/etc/pcp/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:39 managed-node3 python3.9[16640]: ansible-file Invoked with path=/etc/pcp/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:39 managed-node3 python3.9[16747]: ansible-file Invoked with path=/etc/pcp/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:39 managed-node3 python3.9[16854]: ansible-file Invoked with path=/etc/pcp/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:40 managed-node3 python3.9[16961]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:40 managed-node3 python3.9[17068]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:40 managed-node3 python3.9[17175]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:41 managed-node3 python3.9[17282]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:41 
managed-node3 python3.9[17389]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcplistenoverflows follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:02:41 managed-node3 python3.9[17476]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1728453761.3268468-7965-160403041742599/.source dest=/etc/pcp/pmieconf/network/tcplistenoverflows owner=root group=root mode=0644 _original_basename=tcplistenoverflows follow=False checksum=608d8a6ac6ee33bb86b77d28ba24fbcd378db43d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:42 managed-node3 python3.9[17583]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldocookies follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:02:42 managed-node3 python3.9[17670]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1728453761.9576635-7965-136859983589127/.source dest=/etc/pcp/pmieconf/network/tcpqfulldocookies owner=root group=root mode=0644 _original_basename=tcpqfulldocookies follow=False checksum=3256a5c2e8d07a20d8e97a08c0ab163252b0beae backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:42 managed-node3 python3.9[17777]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldrops follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:02:43 managed-node3 python3.9[17864]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1728453762.5832322-7965-184421419279995/.source dest=/etc/pcp/pmieconf/network/tcpqfulldrops owner=root group=root mode=0644 _original_basename=tcpqfulldrops follow=False checksum=37b2bd7f2430bd9678ab078c5e69a53bea556524 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:43 managed-node3 python3.9[17971]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/power/thermal_throttle follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:02:43 managed-node3 python3.9[18058]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1728453763.2513487-7965-78043188811277/.source dest=/etc/pcp/pmieconf/power/thermal_throttle owner=root group=root mode=0644 _original_basename=thermal_throttle follow=False checksum=1d53d6182709617c8f633339652d8d9e75f3b603 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:44 managed-node3 python3.9[18165]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/zeroconf/all_threads follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:02:44 managed-node3 python3.9[18252]: ansible-ansible.legacy.copy Invoked with 
src=/root/.ansible/tmp/ansible-tmp-1728453763.878799-7965-75781586877072/.source dest=/etc/pcp/pmieconf/zeroconf/all_threads owner=root group=root mode=0644 _original_basename=all_threads follow=False checksum=65169db16dcaa224c211373001adc3addf1031c4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:44 managed-node3 python3.9[18359]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/filesys/vfs_files follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:02:44 managed-node3 python3.9[18413]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/filesys/vfs_files _original_basename=vfs_files recurse=False state=file path=/etc/pcp/pmieconf/filesys/vfs_files force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:45 managed-node3 python3.9[18520]: ansible-lineinfile Invoked with state=absent path=/var/lib/pcp/config/pmie/config.default regexp=//.*global webhook_endpoint = "" backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None line=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:45 managed-node3 python3.9[18627]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcplistenoverflows dest=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:46 managed-node3 python3.9[18734]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldocookies dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:46 managed-node3 python3.9[18841]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldrops dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:47 managed-node3 python3.9[18948]: ansible-file Invoked with src=/etc/pcp/pmieconf/power/thermal_throttle dest=/var/lib/pcp/config/pmieconf/power/thermal_throttle state=link force=True path=/var/lib/pcp/config/pmieconf/power/thermal_throttle 
recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:47 managed-node3 python3.9[19055]: ansible-file Invoked with src=/etc/pcp/pmieconf/zeroconf/all_threads dest=/var/lib/pcp/config/pmieconf/zeroconf/all_threads state=link force=True path=/var/lib/pcp/config/pmieconf/zeroconf/all_threads recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:47 managed-node3 python3.9[19162]: ansible-file Invoked with src=/etc/pcp/pmieconf/filesys/vfs_files dest=/var/lib/pcp/config/pmieconf/filesys/vfs_files state=link force=True path=/var/lib/pcp/config/pmieconf/filesys/vfs_files recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:48 managed-node3 python3.9[19269]: ansible-ansible.legacy.systemd Invoked with name=pmie state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Oct 09 02:02:48 managed-node3 systemd[1]: Stopping pmie farm service... ░░ Subject: A stop job for unit pmie_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmie_farm.service has begun execution. ░░ ░░ The job identifier is 1725. Oct 09 02:02:48 managed-node3 systemd[1]: pmie_farm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmie_farm.service has successfully entered the 'dead' state. Oct 09 02:02:48 managed-node3 systemd[1]: Stopped pmie farm service. ░░ Subject: A stop job for unit pmie_farm.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmie_farm.service has finished. ░░ ░░ The job identifier is 1725 and the job result is done. Oct 09 02:02:48 managed-node3 systemd[1]: Stopping Performance Metrics Inference Engine... ░░ Subject: A stop job for unit pmie.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmie.service has begun execution. ░░ ░░ The job identifier is 1657. Oct 09 02:02:48 managed-node3 systemd[1]: pmie.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmie.service has successfully entered the 'dead' state. Oct 09 02:02:48 managed-node3 systemd[1]: Stopped Performance Metrics Inference Engine. ░░ Subject: A stop job for unit pmie.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmie.service has finished. ░░ ░░ The job identifier is 1657 and the job result is done. Oct 09 02:02:48 managed-node3 systemd[1]: Starting Performance Metrics Inference Engine... 
░░ Subject: A start job for unit pmie.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie.service has begun execution. ░░ ░░ The job identifier is 1657. Oct 09 02:02:48 managed-node3 systemd[1]: Started Performance Metrics Inference Engine. ░░ Subject: A start job for unit pmie.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie.service has finished successfully. ░░ ░░ The job identifier is 1657. Oct 09 02:02:48 managed-node3 systemd[1]: Starting pmie farm service... ░░ Subject: A start job for unit pmie_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm.service has begun execution. ░░ ░░ The job identifier is 1725. Oct 09 02:02:49 managed-node3 systemd[1]: Started pmie farm service. ░░ Subject: A start job for unit pmie_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm.service has finished successfully. ░░ ░░ The job identifier is 1725. Oct 09 02:02:49 managed-node3 python3.9[19900]: ansible-lineinfile Invoked with path=/etc/pcp.conf regexp=^PCP_ARCHIVE_DIR= line=PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:49 managed-node3 python3.9[20007]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:02:50 managed-node3 python3.9[20094]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1728453769.5407627-8387-107158035789451/.source dest=/etc/sysconfig/pmlogger mode=0644 follow=False _original_basename=pmlogger.defaults.j2 checksum=67bc35973101c614e92b1990f8bebfffc39fe498 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:50 managed-node3 python3.9[20201]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger_timers follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:02:50 managed-node3 python3.9[20288]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1728453770.2565753-8420-154612357017574/.source dest=/etc/sysconfig/pmlogger_timers mode=0644 follow=False _original_basename=pmlogger.timers.j2 checksum=df7bd3b5b6f1de3af164aab81441c7251a13a298 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:02:51 managed-node3 python3.9[20395]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Oct 09 02:02:51 managed-node3 systemd[1]: Stopping pmlogger farm service... 
░░ Subject: A stop job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 1794. Oct 09 02:02:51 managed-node3 systemd[1]: pmlogger_farm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger_farm.service has successfully entered the 'dead' state. Oct 09 02:02:51 managed-node3 systemd[1]: Stopped pmlogger farm service. ░░ Subject: A stop job for unit pmlogger_farm.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has finished. ░░ ░░ The job identifier is 1794 and the job result is done. Oct 09 02:02:51 managed-node3 systemd[1]: Stopping Performance Metrics Archive Logger... ░░ Subject: A stop job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 1726. Oct 09 02:02:51 managed-node3 systemd[1]: pmlogger.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service has successfully entered the 'dead' state. Oct 09 02:02:51 managed-node3 systemd[1]: Stopped Performance Metrics Archive Logger. ░░ Subject: A stop job for unit pmlogger.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has finished. ░░ ░░ The job identifier is 1726 and the job result is done. Oct 09 02:02:51 managed-node3 systemd[1]: pmlogger.service: Consumed 1.472s CPU time. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service completed and consumed the indicated resources. Oct 09 02:02:51 managed-node3 systemd[1]: Starting Performance Metrics Archive Logger... ░░ Subject: A start job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 1726. Oct 09 02:02:52 managed-node3 systemd[1]: Started Performance Metrics Archive Logger. ░░ Subject: A start job for unit pmlogger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has finished successfully. ░░ ░░ The job identifier is 1726. Oct 09 02:02:52 managed-node3 systemd[1]: Starting pmlogger farm service... ░░ Subject: A start job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 1794. Oct 09 02:02:52 managed-node3 systemd[1]: Started pmlogger farm service. ░░ Subject: A start job for unit pmlogger_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has finished successfully. ░░ ░░ The job identifier is 1794. 
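The stop/start churn above (farm service first, then pmlogger, then both again on the way back up) is the footprint of a single systemd module call; the parameters logged at 02:02:51 correspond to a task along these lines (a sketch; the task name is illustrative):

    - name: Restart and enable pmlogger
      ansible.builtin.systemd:
        name: pmlogger
        state: restarted
        enabled: true

pmlogger_farm.service is evidently coupled to pmlogger.service as a dependency, which is why the journal shows it stopping before and starting after pmlogger on every restart.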
Oct 09 02:02:53 managed-node3 python3.9[21408]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Oct 09 02:02:53 managed-node3 systemd[1]: Stopping pmlogger farm service... ░░ Subject: A stop job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 1863. Oct 09 02:02:53 managed-node3 systemd[1]: pmlogger_farm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger_farm.service has successfully entered the 'dead' state. Oct 09 02:02:53 managed-node3 systemd[1]: Stopped pmlogger farm service. ░░ Subject: A stop job for unit pmlogger_farm.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has finished. ░░ ░░ The job identifier is 1863 and the job result is done. Oct 09 02:02:53 managed-node3 systemd[1]: Stopping Performance Metrics Archive Logger... ░░ Subject: A stop job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 1795. Oct 09 02:02:53 managed-node3 systemd[1]: pmlogger.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service has successfully entered the 'dead' state. Oct 09 02:02:53 managed-node3 systemd[1]: Stopped Performance Metrics Archive Logger. ░░ Subject: A stop job for unit pmlogger.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has finished. ░░ ░░ The job identifier is 1795 and the job result is done. Oct 09 02:02:53 managed-node3 systemd[1]: pmlogger.service: Consumed 1.228s CPU time. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service completed and consumed the indicated resources. Oct 09 02:02:53 managed-node3 systemd[1]: Starting Performance Metrics Archive Logger... ░░ Subject: A start job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 1795. Oct 09 02:02:54 managed-node3 systemd[1]: Started Performance Metrics Archive Logger. ░░ Subject: A start job for unit pmlogger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has finished successfully. ░░ ░░ The job identifier is 1795. Oct 09 02:02:54 managed-node3 systemd[1]: Starting pmlogger farm service... ░░ Subject: A start job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 1863. Oct 09 02:02:54 managed-node3 systemd[1]: Started pmlogger farm service. 
░░ Subject: A start job for unit pmlogger_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has finished successfully. ░░ ░░ The job identifier is 1863. Oct 09 02:02:54 managed-node3 python3.9[22071]: ansible-service_facts Invoked Oct 09 02:02:58 managed-node3 python3.9[22646]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Oct 09 02:02:59 managed-node3 python3.9[22779]: ansible-service_facts Invoked Oct 09 02:03:01 managed-node3 python3.9[22971]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Oct 09 02:03:02 managed-node3 python3.9[23078]: ansible-ansible.legacy.dnf Invoked with name=['pcp', 'pcp-zeroconf'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Oct 09 02:03:03 managed-node3 python3.9[23186]: ansible-ansible.legacy.dnf Invoked with name=['cyrus-sasl-lib', 'cyrus-sasl-scram'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Oct 09 02:03:05 managed-node3 python3.9[23294]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/pcp/pmcd/pmcd.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Oct 09 02:03:06 managed-node3 python3.9[23402]: ansible-file Invoked with path=/etc/pcp/labels state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:07 managed-node3 python3.9[23509]: ansible-file Invoked with path=/etc/pcp/labels/optional state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:07 managed-node3 python3.9[23616]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:03:07 managed-node3 python3.9[23670]: ansible-ansible.legacy.file Invoked with mode=0644 
dest=/etc/pcp/labels/ansible-managed _original_basename=pmcd.explicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:08 managed-node3 python3.9[23777]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/optional/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:03:08 managed-node3 python3.9[23831]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/optional/ansible-managed _original_basename=pmcd.implicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/optional/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:08 managed-node3 systemd[4061]: Created slice User Background Tasks Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Oct 09 02:03:08 managed-node3 python3.9[23938]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmcd follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:03:08 managed-node3 systemd[4061]: Starting Cleanup of User's Temporary Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Oct 09 02:03:08 managed-node3 systemd[4061]: Finished Cleanup of User's Temporary Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. 
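From 02:02:58 onward the same sequence of tasks runs a second time (note the repeated dnf, cat, and labels invocations). The module footprint changes accordingly: where the first pass logged stat followed by copy (content transferred), this pass logs stat followed by file (checksums matched, so only ownership and mode are asserted). The underlying task is the same both times; for /etc/sysconfig/pmcd, whose stat appears above and whose file call follows below, it would be roughly (a sketch; the src name is an assumption from _original_basename):

    - name: Ensure pmcd defaults are configured
      ansible.builtin.template:
        src: pmcd.defaults.j2               # assumed from _original_basename
        dest: /etc/sysconfig/pmcd
        mode: "0644"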
Oct 09 02:03:09 managed-node3 python3.9[23993]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmcd _original_basename=pmcd.defaults.j2 recurse=False state=file path=/etc/sysconfig/pmcd force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:09 managed-node3 python3.9[24100]: ansible-user Invoked with name=metrics system=True state=present non_unique=False force=False remove=False create_home=True move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Oct 09 02:03:10 managed-node3 python3.9[24209]: ansible-ansible.legacy.command Invoked with _raw_params=set -eu if set -o | grep -q pipefail; then set -o pipefail # pipefail not supported on debian, some ubuntu fi if ! sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^metrics@"; then echo "Creating new metrics user in /etc/pcp/passwd.db" echo "metrics" | saslpasswd2 -a pmcd "metrics" chown root:pcp "/etc/pcp/passwd.db" chmod 640 "/etc/pcp/passwd.db" fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Oct 09 02:03:10 managed-node3 python3.9[24321]: ansible-ansible.legacy.stat Invoked with path=/etc/sasl2/pmcd.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:03:10 managed-node3 python3.9[24375]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sasl2/pmcd.conf _original_basename=pmcd.sasl2.conf.j2 recurse=False state=file path=/etc/sasl2/pmcd.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:11 managed-node3 python3.9[24482]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Oct 09 02:03:12 managed-node3 python3.9[24591]: ansible-file Invoked with path=/etc/pcp/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:12 managed-node3 python3.9[24698]: ansible-file Invoked with path=/etc/pcp/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None 
serole=None selevel=None setype=None attributes=None Oct 09 02:03:13 managed-node3 python3.9[24805]: ansible-file Invoked with path=/etc/pcp/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:13 managed-node3 python3.9[24912]: ansible-file Invoked with path=/etc/pcp/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:13 managed-node3 python3.9[25019]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:14 managed-node3 python3.9[25126]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:14 managed-node3 python3.9[25233]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:14 managed-node3 python3.9[25340]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:15 managed-node3 python3.9[25447]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcplistenoverflows follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:03:15 managed-node3 python3.9[25501]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcplistenoverflows _original_basename=tcplistenoverflows recurse=False state=file path=/etc/pcp/pmieconf/network/tcplistenoverflows force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:15 managed-node3 python3.9[25608]: 
ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldocookies follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:03:16 managed-node3 python3.9[25662]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldocookies _original_basename=tcpqfulldocookies recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldocookies force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:16 managed-node3 python3.9[25769]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldrops follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:03:16 managed-node3 python3.9[25823]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldrops _original_basename=tcpqfulldrops recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldrops force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:16 managed-node3 python3.9[25930]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/power/thermal_throttle follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:03:17 managed-node3 python3.9[25984]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/power/thermal_throttle _original_basename=thermal_throttle recurse=False state=file path=/etc/pcp/pmieconf/power/thermal_throttle force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:17 managed-node3 python3.9[26091]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/zeroconf/all_threads follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:03:17 managed-node3 python3.9[26145]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/zeroconf/all_threads _original_basename=all_threads recurse=False state=file path=/etc/pcp/pmieconf/zeroconf/all_threads force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:18 managed-node3 python3.9[26252]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/filesys/vfs_files follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:03:18 managed-node3 python3.9[26306]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/filesys/vfs_files _original_basename=vfs_files recurse=False state=file path=/etc/pcp/pmieconf/filesys/vfs_files force=False follow=True modification_time_format=%Y%m%d%H%M.%S 
access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:18 managed-node3 python3.9[26413]: ansible-lineinfile Invoked with state=absent path=/var/lib/pcp/config/pmie/config.default regexp=//.*global webhook_endpoint = "" backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None line=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:19 managed-node3 python3.9[26520]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcplistenoverflows dest=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:19 managed-node3 python3.9[26627]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldocookies dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:20 managed-node3 python3.9[26734]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldrops dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:20 managed-node3 python3.9[26841]: ansible-file Invoked with src=/etc/pcp/pmieconf/power/thermal_throttle dest=/var/lib/pcp/config/pmieconf/power/thermal_throttle state=link force=True path=/var/lib/pcp/config/pmieconf/power/thermal_throttle recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:20 managed-node3 python3.9[26948]: ansible-file Invoked with src=/etc/pcp/pmieconf/zeroconf/all_threads dest=/var/lib/pcp/config/pmieconf/zeroconf/all_threads state=link force=True path=/var/lib/pcp/config/pmieconf/zeroconf/all_threads recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:21 managed-node3 python3.9[27055]: ansible-file Invoked with src=/etc/pcp/pmieconf/filesys/vfs_files dest=/var/lib/pcp/config/pmieconf/filesys/vfs_files state=link 
force=True path=/var/lib/pcp/config/pmieconf/filesys/vfs_files recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:21 managed-node3 python3.9[27162]: ansible-ansible.legacy.systemd Invoked with name=pmie state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Oct 09 02:03:22 managed-node3 python3.9[27271]: ansible-lineinfile Invoked with path=/etc/pcp.conf regexp=^PCP_ARCHIVE_DIR= line=PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:22 managed-node3 python3.9[27378]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:03:22 managed-node3 python3.9[27432]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmlogger _original_basename=pmlogger.defaults.j2 recurse=False state=file path=/etc/sysconfig/pmlogger force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:23 managed-node3 python3.9[27539]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger_timers follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Oct 09 02:03:23 managed-node3 python3.9[27593]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmlogger_timers _original_basename=pmlogger.timers.j2 recurse=False state=file path=/etc/sysconfig/pmlogger_timers force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Oct 09 02:03:24 managed-node3 python3.9[27700]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Oct 09 02:03:25 managed-node3 python3.9[27809]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Oct 09 02:03:25 managed-node3 python3.9[27916]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Oct 09 02:03:26 managed-node3 python3.9[28023]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False 
Oct 09 02:03:27 managed-node3 python3.9[28131]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 09 02:03:28 managed-node3 dbus-broker-launch[577]: Noticed file-system modification, trigger reload.
░░ Subject: A configuration directory was written to
░░ Defined-By: dbus-broker
░░ Support: https://groups.google.com/forum/#!forum/bus1-devel
░░
░░ A write was detected to one of the directories containing D-Bus configuration
░░ files, triggering a configuration reload.
░░
░░ This functionality exists for backwards compatibility to pick up changes to
░░ D-Bus configuration without an explicit reload request. Typically when
░░ installing or removing third-party software causes D-Bus configuration files
░░ to be added or removed.
░░
░░ It is worth noting that this may cause partial configuration to be loaded in
░░ case dispatching this notification races with the writing of the configuration
░░ files. However, a future notification will then cause the configuration to be
░░ reloaded again.
Oct 09 02:03:28 managed-node3 dbus-broker-launch[577]: Noticed file-system modification, trigger reload.
░░ Subject: A configuration directory was written to
░░ Defined-By: dbus-broker
░░ Support: https://groups.google.com/forum/#!forum/bus1-devel
░░
░░ A write was detected to one of the directories containing D-Bus configuration
░░ files, triggering a configuration reload.
░░
░░ This functionality exists for backwards compatibility to pick up changes to
░░ D-Bus configuration without an explicit reload request. Typically when
░░ installing or removing third-party software causes D-Bus configuration files
░░ to be added or removed.
░░
░░ It is worth noting that this may cause partial configuration to be loaded in
░░ case dispatching this notification races with the writing of the configuration
░░ files. However, a future notification will then cause the configuration to be
░░ reloaded again.
Oct 09 02:03:28 managed-node3 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.
░░ Subject: A start job for unit run-rc279291c2f8649a1905a72543fd76ebd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit run-rc279291c2f8649a1905a72543fd76ebd.service has finished successfully.
░░
░░ The job identifier is 1866.
Oct 09 02:03:28 managed-node3 systemd[1]: Starting man-db-cache-update.service...
░░ Subject: A start job for unit man-db-cache-update.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit man-db-cache-update.service has begun execution.
░░
░░ The job identifier is 1929.
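The two dnf invocations bracketing this D-Bus reload install the SELinux tooling the role needs before it can toggle booleans. The equivalent task, sketched from the logged arguments (the role may well run these as separate tasks, as the two invocations suggest):

    # Sketch only: package list taken from the logged dnf invocations.
    - name: Install SELinux management tooling
      ansible.builtin.dnf:
        name:
          - python3-libselinux
          - python3-policycoreutils
          - policycoreutils-python-utils
        state: present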
Oct 09 02:03:29 managed-node3 python3.9[28596]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 09 02:03:29 managed-node3 systemd[1]: man-db-cache-update.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit man-db-cache-update.service has successfully entered the 'dead' state.
Oct 09 02:03:29 managed-node3 systemd[1]: Finished man-db-cache-update.service.
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit man-db-cache-update.service has finished successfully.
░░
░░ The job identifier is 1929.
Oct 09 02:03:29 managed-node3 systemd[1]: run-rc279291c2f8649a1905a72543fd76ebd.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-rc279291c2f8649a1905a72543fd76ebd.service has successfully entered the 'dead' state.
Oct 09 02:03:31 managed-node3 python3.9[28730]: ansible-seboolean Invoked with name=pcp_bind_all_unreserved_ports state=True persistent=False ignore_selinux_state=False
Oct 09 02:03:31 managed-node3 python3.9[28837]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked
Oct 09 02:03:35 managed-node3 python3.9[28944]: ansible-ansible.legacy.command Invoked with _raw_params=pcp _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 09 02:03:35 managed-node3 python3.9[29142]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail pmprobe -I pmcd.pmlogger.pmcd_host | grep '"primary"' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 09 02:03:36 managed-node3 python3.9[29252]: ansible-ansible.legacy.command Invoked with _raw_params=grep "^# Ansible managed" "/etc/sysconfig/pmlogger" _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 09 02:03:36 managed-node3 python3.9[29360]: ansible-ansible.legacy.command Invoked with _raw_params=grep "^# Ansible managed" "/etc/sysconfig/pmlogger_timers" _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 09 02:03:37 managed-node3 python3.9[29468]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail pmprobe -I pmcd.pmie.pmcd_host | grep '"primary"' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 09 02:03:37 managed-node3 python3.9[29578]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail semanage boolean --list | egrep "pcp_bind_all_unreserved_ports *\(on " _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 09 02:03:39 managed-node3 python3.9[29688]: ansible-service_facts Invoked
Oct 09 02:03:40 managed-node3 dbus-broker-launch[578]: avc: op=load_policy lsm=selinux seqno=3 res=1
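The seboolean invocation at 02:03:31 switches pcp_bind_all_unreserved_ports on without persistence, and the semanage pipeline at 02:03:37 then verifies it. A sketch of the toggle, reconstructed from the logged arguments:

    # Sketch of the boolean toggle logged at 02:03:31; persistent: false matches
    # the invocation, so the change would not survive a reboot.
    - name: Allow PCP to bind to all unreserved ports
      ansible.posix.seboolean:
        name: pcp_bind_all_unreserved_ports
        state: true
        persistent: false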
Oct 09 02:03:41 managed-node3 python3.9[29880]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 09 02:03:41 managed-node3 python3.9[29988]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 09 02:03:42 managed-node3 python3.9[30096]: ansible-ansible.legacy.systemd Invoked with name=pmie state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 09 02:03:42 managed-node3 python3.9[30204]: ansible-ansible.legacy.systemd Invoked with name=pmproxy state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 09 02:03:44 managed-node3 python3.9[30347]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 09 02:03:45 managed-node3 python3.9[30480]: ansible-service_facts Invoked
Oct 09 02:03:48 managed-node3 python3.9[30672]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 09 02:03:49 managed-node3 python3.9[30779]: ansible-ansible.legacy.dnf Invoked with name=['pcp', 'pcp-zeroconf'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 09 02:03:50 managed-node3 python3.9[30887]: ansible-ansible.legacy.dnf Invoked with name=['cyrus-sasl-lib', 'cyrus-sasl-scram'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 09 02:03:51 managed-node3 python3.9[30995]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/pcp/pmcd/pmcd.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 09 02:03:52 managed-node3 python3.9[31103]: ansible-file Invoked with path=/etc/pcp/labels state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
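The verification commands recorded above at 02:03:35 through 02:03:37 each wrap pmprobe in a pipefail-guarded shell pipeline, so a failure in either stage fails the check. A hedged sketch of one such task (name and changed_when handling are illustrative):

    - name: Verify the primary pmlogger is registered with pmcd
      ansible.builtin.shell: |
        set -euo pipefail
        pmprobe -I pmcd.pmlogger.pmcd_host | grep '"primary"'
      changed_when: false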
Oct 09 02:03:52 managed-node3 python3.9[31210]: ansible-file Invoked with path=/etc/pcp/labels/optional state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:03:53 managed-node3 python3.9[31317]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 09 02:03:53 managed-node3 python3.9[31371]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/ansible-managed _original_basename=pmcd.explicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:03:53 managed-node3 python3.9[31478]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/optional/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 09 02:03:54 managed-node3 python3.9[31532]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/optional/ansible-managed _original_basename=pmcd.implicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/optional/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:03:54 managed-node3 python3.9[31643]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmcd follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 09 02:03:54 managed-node3 python3.9[31697]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmcd _original_basename=pmcd.defaults.j2 recurse=False state=file path=/etc/sysconfig/pmcd force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:03:55 managed-node3 python3.9[31804]: ansible-user Invoked with name=metrics system=True state=present non_unique=False force=False remove=False create_home=True move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None
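The ansible-user call at 02:03:55 creates the metrics account as a system user. Reconstructed from the logged parameters (password handling is elided here, since it was masked in the log):

    - name: Ensure the metrics account exists as a system user
      ansible.builtin.user:
        name: metrics
        system: true
        state: present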
Oct 09 02:03:55 managed-node3 python3.9[31913]: ansible-ansible.legacy.command Invoked with _raw_params=set -eu if set -o | grep -q pipefail; then set -o pipefail # pipefail not supported on debian, some ubuntu fi if ! sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^metrics@"; then echo "Creating new metrics user in /etc/pcp/passwd.db" echo "metrics" | saslpasswd2 -a pmcd "metrics" chown root:pcp "/etc/pcp/passwd.db" chmod 640 "/etc/pcp/passwd.db" fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 09 02:03:56 managed-node3 python3.9[32025]: ansible-ansible.legacy.stat Invoked with path=/etc/sasl2/pmcd.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 09 02:03:56 managed-node3 python3.9[32079]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sasl2/pmcd.conf _original_basename=pmcd.sasl2.conf.j2 recurse=False state=file path=/etc/sasl2/pmcd.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:03:57 managed-node3 python3.9[32186]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 09 02:03:57 managed-node3 python3.9[32295]: ansible-file Invoked with path=/etc/pcp/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:03:58 managed-node3 python3.9[32402]: ansible-file Invoked with path=/etc/pcp/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:03:58 managed-node3 python3.9[32509]: ansible-file Invoked with path=/etc/pcp/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:03:58 managed-node3 python3.9[32616]: ansible-file Invoked with path=/etc/pcp/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:03:59 managed-node3 python3.9[32723]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
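The shell block recorded at 02:03:55 above arrives with its newlines flattened by the journal. Restored to its likely multi-line form as a shell task (line breaks are inferred from the shell syntax; the script text itself is verbatim from the log):

    - name: Register the metrics user in the pmcd SASL database
      ansible.builtin.shell: |
        set -eu
        if set -o | grep -q pipefail; then
          set -o pipefail  # pipefail not supported on debian, some ubuntu
        fi
        if ! sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^metrics@"; then
          echo "Creating new metrics user in /etc/pcp/passwd.db"
          echo "metrics" | saslpasswd2 -a pmcd "metrics"
          chown root:pcp "/etc/pcp/passwd.db"
          chmod 640 "/etc/pcp/passwd.db"
        fi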
Oct 09 02:03:59 managed-node3 python3.9[32830]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:03:59 managed-node3 python3.9[32937]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:04:00 managed-node3 python3.9[33044]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:04:00 managed-node3 python3.9[33151]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcplistenoverflows follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 09 02:04:00 managed-node3 python3.9[33205]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcplistenoverflows _original_basename=tcplistenoverflows recurse=False state=file path=/etc/pcp/pmieconf/network/tcplistenoverflows force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:04:01 managed-node3 python3.9[33312]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldocookies follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 09 02:04:01 managed-node3 python3.9[33366]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldocookies _original_basename=tcpqfulldocookies recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldocookies force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:04:01 managed-node3 python3.9[33473]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldrops follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 09 02:04:02 managed-node3 python3.9[33527]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldrops _original_basename=tcpqfulldrops recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldrops force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:04:02 managed-node3 python3.9[33634]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/power/thermal_throttle follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 09 02:04:02 managed-node3 python3.9[33688]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/power/thermal_throttle _original_basename=thermal_throttle recurse=False state=file path=/etc/pcp/pmieconf/power/thermal_throttle force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:04:02 managed-node3 python3.9[33795]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/zeroconf/all_threads follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 09 02:04:03 managed-node3 python3.9[33849]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/zeroconf/all_threads _original_basename=all_threads recurse=False state=file path=/etc/pcp/pmieconf/zeroconf/all_threads force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:04:03 managed-node3 python3.9[33956]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/filesys/vfs_files follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 09 02:04:03 managed-node3 python3.9[34010]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/filesys/vfs_files _original_basename=vfs_files recurse=False state=file path=/etc/pcp/pmieconf/filesys/vfs_files force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:04:04 managed-node3 python3.9[34117]: ansible-lineinfile Invoked with state=absent path=/var/lib/pcp/config/pmie/config.default regexp=//.*global webhook_endpoint = "" backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None line=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:04:04 managed-node3 python3.9[34224]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcplistenoverflows dest=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:04:05 managed-node3 python3.9[34331]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldocookies dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:04:05 managed-node3 python3.9[34438]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldrops dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:04:06 managed-node3 python3.9[34545]: ansible-file Invoked with src=/etc/pcp/pmieconf/power/thermal_throttle dest=/var/lib/pcp/config/pmieconf/power/thermal_throttle state=link force=True path=/var/lib/pcp/config/pmieconf/power/thermal_throttle recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:04:06 managed-node3 python3.9[34652]: ansible-file Invoked with src=/etc/pcp/pmieconf/zeroconf/all_threads dest=/var/lib/pcp/config/pmieconf/zeroconf/all_threads state=link force=True path=/var/lib/pcp/config/pmieconf/zeroconf/all_threads recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:04:06 managed-node3 python3.9[34759]: ansible-file Invoked with src=/etc/pcp/pmieconf/filesys/vfs_files dest=/var/lib/pcp/config/pmieconf/filesys/vfs_files state=link force=True path=/var/lib/pcp/config/pmieconf/filesys/vfs_files recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:04:07 managed-node3 python3.9[34866]: ansible-ansible.legacy.systemd Invoked with name=pmie state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 09 02:04:07 managed-node3 python3.9[34975]: ansible-lineinfile Invoked with path=/etc/pcp.conf regexp=^PCP_ARCHIVE_DIR= line=PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:04:08 managed-node3 python3.9[35082]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 09 02:04:08 managed-node3 python3.9[35136]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmlogger _original_basename=pmlogger.defaults.j2 recurse=False state=file path=/etc/sysconfig/pmlogger force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:04:08 managed-node3 python3.9[35243]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger_timers follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 09 02:04:09 managed-node3 python3.9[35297]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmlogger_timers _original_basename=pmlogger.timers.j2 recurse=False state=file path=/etc/sysconfig/pmlogger_timers force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:04:09 managed-node3 python3.9[35404]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 09 02:04:10 managed-node3 python3.9[35513]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmproxy follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 09 02:04:10 managed-node3 python3.9[35600]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1728453849.898894-12937-36811309722922/.source dest=/etc/sysconfig/pmproxy mode=0644 follow=False _original_basename=pmproxy.defaults.j2 checksum=fa1a173dfa5b1affbf6767115bdae2ce00e98ecc backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 09 02:04:11 managed-node3 python3.9[35707]: ansible-ansible.legacy.systemd Invoked with name=pmproxy state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 09 02:04:11 managed-node3 systemd[1]: Reloading.
Oct 09 02:04:11 managed-node3 systemd-rc-local-generator[35727]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 09 02:04:11 managed-node3 systemd[1]: Starting Proxy for Performance Metrics Collector Daemon...
░░ Subject: A start job for unit pmproxy.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmproxy.service has begun execution.
░░
░░ The job identifier is 1992.
Oct 09 02:04:11 managed-node3 systemd[1]: Started Proxy for Performance Metrics Collector Daemon.
░░ Subject: A start job for unit pmproxy.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit pmproxy.service has finished successfully.
░░
░░ The job identifier is 1992.
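The 02:04:10 to 02:04:11 sequence is the usual template-then-start pattern: stat the target, copy the rendered pmproxy.defaults.j2 into /etc/sysconfig/pmproxy, then enable and start the unit. A condensed sketch under those assumptions (the template source path is illustrative; the role stages the rendered file via copy, as the log shows):

    - name: Install pmproxy defaults from template
      ansible.builtin.template:
        src: pmproxy.defaults.j2
        dest: /etc/sysconfig/pmproxy
        mode: "0644"

    - name: Ensure pmproxy is enabled and running
      ansible.builtin.systemd:
        name: pmproxy
        state: started
        enabled: true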
Oct 09 02:04:12 managed-node3 python3.9[35891]: ansible-ansible.legacy.dnf Invoked with name=['grafana', 'grafana-pcp'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 09 02:04:23 managed-node3 groupadd[35906]: group added to /etc/group: name=grafana, GID=991
Oct 09 02:04:23 managed-node3 groupadd[35906]: group added to /etc/gshadow: name=grafana
Oct 09 02:04:23 managed-node3 groupadd[35906]: new group: name=grafana, GID=991
Oct 09 02:04:23 managed-node3 useradd[35913]: new user: name=grafana, UID=991, GID=991, home=/usr/share/grafana, shell=/usr/sbin/nologin, from=none
Oct 09 02:04:42 managed-node3 kernel: SELinux: Converting 426 SID table entries...
Oct 09 02:04:42 managed-node3 kernel: SELinux: policy capability network_peer_controls=1
Oct 09 02:04:42 managed-node3 kernel: SELinux: policy capability open_perms=1
Oct 09 02:04:42 managed-node3 kernel: SELinux: policy capability extended_socket_class=1
Oct 09 02:04:42 managed-node3 kernel: SELinux: policy capability always_check_network=0
Oct 09 02:04:42 managed-node3 kernel: SELinux: policy capability cgroup_seclabel=1
Oct 09 02:04:42 managed-node3 kernel: SELinux: policy capability nnp_nosuid_transition=1
Oct 09 02:04:42 managed-node3 kernel: SELinux: policy capability genfs_seclabel_symlinks=1
Oct 09 02:04:44 managed-node3 kernel: SELinux: Converting 430 SID table entries...
Oct 09 02:04:44 managed-node3 kernel: SELinux: policy capability network_peer_controls=1
Oct 09 02:04:44 managed-node3 kernel: SELinux: policy capability open_perms=1
Oct 09 02:04:44 managed-node3 kernel: SELinux: policy capability extended_socket_class=1
Oct 09 02:04:44 managed-node3 kernel: SELinux: policy capability always_check_network=0
Oct 09 02:04:44 managed-node3 kernel: SELinux: policy capability cgroup_seclabel=1
Oct 09 02:04:44 managed-node3 kernel: SELinux: policy capability nnp_nosuid_transition=1
Oct 09 02:04:44 managed-node3 kernel: SELinux: policy capability genfs_seclabel_symlinks=1
Oct 09 02:04:44 managed-node3 dbus-broker-launch[578]: avc: op=load_policy lsm=selinux seqno=5 res=1
Oct 09 02:04:44 managed-node3 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.
░░ Subject: A start job for unit run-r25e7a8dd01c84b8a8feea2e4b18ff922.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit run-r25e7a8dd01c84b8a8feea2e4b18ff922.service has finished successfully.
░░
░░ The job identifier is 2056.
Oct 09 02:04:44 managed-node3 systemd[1]: Starting man-db-cache-update.service...
░░ Subject: A start job for unit man-db-cache-update.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit man-db-cache-update.service has begun execution.
░░
░░ The job identifier is 2119.
Oct 09 02:04:44 managed-node3 systemd[1]: Reloading.
Oct 09 02:04:44 managed-node3 systemd-rc-local-generator[35977]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 09 02:04:45 managed-node3 systemd[1]: Queuing reload/restart jobs for marked units…
Oct 09 02:04:45 managed-node3 systemd[1]: man-db-cache-update.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit man-db-cache-update.service has successfully entered the 'dead' state.
Oct 09 02:04:45 managed-node3 systemd[1]: Finished man-db-cache-update.service.
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit man-db-cache-update.service has finished successfully.
░░
░░ The job identifier is 2119.
Oct 09 02:04:45 managed-node3 systemd[1]: run-r25e7a8dd01c84b8a8feea2e4b18ff922.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-r25e7a8dd01c84b8a8feea2e4b18ff922.service has successfully entered the 'dead' state.
Oct 09 02:04:50 managed-node3 python3.9[36252]: ansible-package_facts Invoked with manager=['auto'] strategy=first
Oct 09 02:04:51 managed-node3 python3.9[36414]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex echo '##################' echo List of SELinux AVCs - note list may be empty grep type=AVC /var/log/audit/audit.log echo '##################' ls -alrtF /run if [ -d /run/pcp ]; then ls -alrtF /run/pcp else echo ERROR - /run/pcp does not exist fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 09 02:04:52 managed-node3 python3.9[36526]: ansible-service_facts Invoked
Oct 09 02:04:53 managed-node3 python3.9[36719]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 09 02:04:54 managed-node3 python3.9[36831]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 09 02:04:54 managed-node3 python3.9[36939]: ansible-ansible.legacy.systemd Invoked with name=pmie state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 09 02:04:55 managed-node3 python3.9[37047]: ansible-ansible.legacy.systemd Invoked with name=pmproxy state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 09 02:04:55 managed-node3 systemd[1]: Stopping Proxy for Performance Metrics Collector Daemon...
░░ Subject: A stop job for unit pmproxy.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmproxy.service has begun execution.
░░
░░ The job identifier is 2182.
Oct 09 02:04:55 managed-node3 systemd[1]: pmproxy.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit pmproxy.service has successfully entered the 'dead' state.
Oct 09 02:04:55 managed-node3 systemd[1]: Stopped Proxy for Performance Metrics Collector Daemon.
░░ Subject: A stop job for unit pmproxy.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit pmproxy.service has finished.
░░
░░ The job identifier is 2182 and the job result is done.
Oct 09 02:04:57 managed-node3 python3.9[37192]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 09 02:04:58 managed-node3 python3.9[37325]: ansible-service_facts Invoked
Oct 09 02:05:00 managed-node3 python3.9[37518]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 09 02:05:01 managed-node3 python3.9[37625]: ansible-ansible.legacy.dnf Invoked with name=['redis'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 09 02:05:02 managed-node3 groupadd[37632]: group added to /etc/group: name=redis, GID=990
Oct 09 02:05:02 managed-node3 groupadd[37632]: group added to /etc/gshadow: name=redis
Oct 09 02:05:02 managed-node3 groupadd[37632]: new group: name=redis, GID=990
Oct 09 02:05:02 managed-node3 useradd[37639]: new user: name=redis, UID=990, GID=990, home=/var/lib/redis, shell=/sbin/nologin, from=none
Oct 09 02:05:02 managed-node3 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.
░░ Subject: A start job for unit run-r11b2d8f1d7b0431f85d68a77ceb6e156.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit run-r11b2d8f1d7b0431f85d68a77ceb6e156.service has finished successfully.
░░
░░ The job identifier is 2183.
Oct 09 02:05:02 managed-node3 systemd[1]: Starting man-db-cache-update.service...
░░ Subject: A start job for unit man-db-cache-update.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit man-db-cache-update.service has begun execution.
░░
░░ The job identifier is 2246.
Oct 09 02:05:02 managed-node3 systemd[1]: Reloading.
Oct 09 02:05:02 managed-node3 systemd-rc-local-generator[37675]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 09 02:05:02 managed-node3 systemd[1]: Queuing reload/restart jobs for marked units…
Oct 09 02:05:03 managed-node3 systemd[1]: man-db-cache-update.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit man-db-cache-update.service has successfully entered the 'dead' state.
Oct 09 02:05:03 managed-node3 systemd[1]: Finished man-db-cache-update.service.
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit man-db-cache-update.service has finished successfully.
░░
░░ The job identifier is 2246.
Oct 09 02:05:03 managed-node3 systemd[1]: run-r11b2d8f1d7b0431f85d68a77ceb6e156.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-r11b2d8f1d7b0431f85d68a77ceb6e156.service has successfully entered the 'dead' state.
Oct 09 02:05:03 managed-node3 python3.9[37967]: ansible-file Invoked with path=/etc/redis state=directory owner=redis group=root mode=0750 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
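Immediately after this directory is created, the run aborts (see the Reraise error output below) because templating the redis configuration references redis_save_to_disk, which is undefined on this host. A defensive pattern for such a variable is to give it an explicit default before any template consumes it; this is a hypothetical illustration of the fix, not the role's code:

    # Hypothetical guard: the self-referential default() is evaluated before
    # assignment, so templating can no longer raise AnsibleUndefinedVariable.
    # The chosen fallback value (true) is an assumption for illustration.
    - name: Default redis_save_to_disk when the caller does not set it
      ansible.builtin.set_fact:
        redis_save_to_disk: "{{ redis_save_to_disk | default(true) }}"

Equivalently, the template itself could apply the filter inline, e.g. referencing the variable as redis_save_to_disk | default(true) at its point of use.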
Oct 09 02:05:04 managed-node3 python3.9[38129]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex echo '##################' echo List of SELinux AVCs - note list may be empty grep type=AVC /var/log/audit/audit.log echo '##################' ls -alrtF /run if [ -d /run/pcp ]; then ls -alrtF /run/pcp else echo ERROR - /run/pcp does not exist fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
##################
List of SELinux AVCs - note list may be empty
##################
total 40
dr-xr-xr-x. 18 root root 235 Oct 1 13:02 ../
drwxr-xr-x. 2 root root 60 Oct 9 01:55 tmpfiles.d/
drwxr-xr-x. 3 root root 60 Oct 9 01:55 log/
drwxr-xr-x. 2 root root 40 Oct 9 01:55 mount/
drwxr-xr-x. 4 root root 100 Oct 9 01:55 initramfs/
-r--r--r--. 1 root root 33 Oct 9 01:55 machine-id
srw-rw-rw-. 1 root root 0 Oct 9 01:55 rpcbind.sock=
prw-------. 1 root root 0 Oct 9 01:55 initctl|
drwxr-xr-x. 5 root root 100 Oct 9 01:55 credentials/
drwx------. 2 root root 40 Oct 9 01:55 cryptsetup/
drwxr-xr-x. 2 root root 40 Oct 9 01:55 setrans/
drwxr-xr-x. 2 root root 40 Oct 9 01:55 console/
drwxr-xr-x. 2 root root 40 Oct 9 01:55 sepermit/
drwxr-xr-x. 2 root root 40 Oct 9 01:55 motd.d/
drwxr-xr-x. 2 root root 40 Oct 9 01:55 faillock/
drwx--x--x. 3 root root 60 Oct 9 01:55 sudo/
-rw-r--r--. 1 root root 0 Oct 9 01:55 motd
drwxr-xr-x. 3 root root 60 Oct 9 01:55 tpm2-tss/
drwx------. 2 rpc rpc 60 Oct 9 01:55 rpcbind/
-rw-r--r--. 1 root root 4 Oct 9 01:55 auditd.pid
drwxr-xr-x. 2 root root 60 Oct 9 01:55 dbus/
srw-rw-rw-. 1 root root 0 Oct 9 01:55 .heim_org.h5l.kcm-socket=
drwxr-xr-x. 2 root root 60 Oct 9 01:55 irqbalance/
-rw-r--r--. 1 root root 619 Oct 9 01:56 dhclient.lease
-rw-r--r--. 1 root root 4 Oct 9 01:56 dhclient.pid
-rw-------. 1 root root 4 Oct 9 01:56 gssproxy.pid
srw-rw-rw-. 1 root root 0 Oct 9 01:56 gssproxy.sock=
drwxr-xr-x. 2 root root 60 Oct 9 01:56 chrony-dhcp/
drwxr-x---. 2 chrony chrony 80 Oct 9 01:56 chrony/
drwxr-xr-x. 3 root root 80 Oct 9 01:56 lock/
-rw-------. 1 root root 4 Oct 9 01:56 sm-notify.pid
-rw-r--r--. 1 root root 4 Oct 9 01:56 sshd.pid
-rw-------. 1 root root 3 Oct 9 01:56 rsyslogd.pid
-rw-r--r--. 1 root root 4 Oct 9 01:56 crond.pid
----------. 1 root root 0 Oct 9 01:56 cron.reboot
drwx------. 3 root root 340 Oct 9 01:56 cloud-init/
-rw-------. 1 root root 0 Oct 9 01:56 agetty.reload
drwxr-xr-x. 2 root root 80 Oct 9 01:56 blkid/
drwxr-xr-x. 3 root root 60 Oct 9 01:58 user/
drwxr-xr-x. 6 root root 160 Oct 9 02:00 NetworkManager/
drwxr-xr-x. 7 root root 160 Oct 9 02:02 udev/
drwxr-xr-x. 2 grafana grafana 40 Oct 9 02:04 grafana/
drwxr-xr-x. 29 root root 920 Oct 9 02:04 ./
drwxrwxr-x. 2 pcp pcp 160 Oct 9 02:04 pcp/
drwxr-xr-x. 22 root root 560 Oct 9 02:05 systemd/
-rw-rw-r--. 1 root utmp 1920 Oct 9 02:05 utmp
total 12
srw-rw-rw-. 1 root root 0 Oct 9 02:02 pmcd.socket=
-r--r--r--. 1 root root 5 Oct 9 02:02 pmcd.pid
-r--r--r--. 1 pcp pcp 5 Oct 9 02:02 pmie.pid
lrwxrwxrwx. 1 pcp pcp 30 Oct 9 02:02 pmlogger.primary.socket -> /run/pcp/pmlogger.21693.socket=
-r--r--r--. 1 pcp pcp 5 Oct 9 02:02 pmlogger.pid
srw-rw-rw-. 1 pcp pcp 0 Oct 9 02:02 pmlogger.21693.socket=
drwxr-xr-x. 29 root root 920 Oct 9 02:04 ../
drwxrwxr-x. 2 pcp pcp 160 Oct 9 02:04 ./

TASK [Reraise error] ***********************************************************
task path: /tmp/collections-kpy/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml:17
Wednesday 09 October 2024 02:05:04 -0400 (0:00:00.529) 0:00:08.077 *****
fatal: [managed-node3]: FAILED! => {
    "changed": false
}

MSG:

{'results': [{'failed': True, 'msg': "AnsibleUndefinedVariable: 'redis_save_to_disk' is undefined", 'exception': 'Traceback (most recent call last):\n File "/usr/local/lib/python3.12/site-packages/ansible/template/__init__.py", line 993, in do_template\n res = myenv.concat(rf)\n ^^^^^^^^^^^^^^^^\n File "/usr/local/lib/python3.12/site-packages/ansible/template/native_helpers.py", line 81, in ansible_concat\n return \'\'.join([to_text(v) for v in nodes])\n ^^^^^\n File "