ansible-playbook 2.9.27
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.9/site-packages/ansible
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.9.19 (main, May 16 2024, 11:40:09) [GCC 8.5.0 20210514 (Red Hat 8.5.0-22)]
No config file found; using defaults
[WARNING]: running playbook inside collection fedora.linux_system_roles
Skipping callback 'actionable', as we already have a stdout callback.
Skipping callback 'counter_enabled', as we already have a stdout callback.
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'full_skip', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'jsonl', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'null', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
Skipping callback 'selective', as we already have a stdout callback.
Skipping callback 'skippy', as we already have a stdout callback.
Skipping callback 'stderr', as we already have a stdout callback.
Skipping callback 'unixy', as we already have a stdout callback.
Skipping callback 'yaml', as we already have a stdout callback.

PLAYBOOK: tests_quadlet_demo.yml ***********************************************
2 plays in /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml

PLAY [all] *********************************************************************
META: ran handlers

TASK [Include vault variables] *************************************************
task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:5
Saturday 12 July 2025 12:39:55 -0400 (0:00:00.024) 0:00:00.024 *********
ok: [managed-node2] => {
    "ansible_facts": {
        "__podman_test_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n"
        },
        "mysql_container_root_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n"
        }
    },
    "ansible_included_var_files": [
        "/tmp/podman-Tc3/tests/vars/vault-variables.yml"
    ],
    "changed": false
}
META: ran handlers
META: ran handlers

PLAY [Deploy the quadlet demo app] *********************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9
Saturday
12 July 2025 12:39:55 -0400 (0:00:00.019) 0:00:00.044 ********* ok: [managed-node2] META: ran handlers TASK [Test is only supported on x86_64] **************************************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:38 Saturday 12 July 2025 12:39:56 -0400 (0:00:00.911) 0:00:00.955 ********* skipping: [managed-node2] => {} META: TASK [Generate certificates] *************************************************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:51 Saturday 12 July 2025 12:39:56 -0400 (0:00:00.083) 0:00:01.039 ********* TASK [fedora.linux_system_roles.certificate : Set version specific variables] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:2 Saturday 12 July 2025 12:39:56 -0400 (0:00:00.058) 0:00:01.097 ********* included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.certificate : Ensure ansible_facts used by role] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:2 Saturday 12 July 2025 12:39:56 -0400 (0:00:00.036) 0:00:01.133 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.certificate : Check if system is ostree] ******* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:10 Saturday 12 July 2025 12:39:56 -0400 (0:00:00.024) 0:00:01.158 ********* ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.certificate : Set flag to indicate system is ostree] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:15 Saturday 12 July 2025 12:39:57 -0400 (0:00:00.501) 0:00:01.660 ********* ok: [managed-node2] => { "ansible_facts": { "__certificate_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.certificate : Run systemctl] ******************* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:22 Saturday 12 July 2025 12:39:57 -0400 (0:00:00.020) 0:00:01.680 ********* ok: [managed-node2] => { "changed": false, "cmd": [ "systemctl", "is-system-running" ], "delta": "0:00:00.007286", "end": "2025-07-12 12:39:57.815379", "failed_when_result": false, "rc": 0, "start": "2025-07-12 12:39:57.808093" } STDOUT: running TASK [fedora.linux_system_roles.certificate : Require installed systemd] ******* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:30 Saturday 12 July 2025 12:39:57 -0400 (0:00:00.450) 0:00:02.131 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.certificate : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:35 Saturday 12 July 2025 12:39:57 -0400 (0:00:00.022) 0:00:02.153 ********* ok: [managed-node2] => { "ansible_facts": { "__certificate_is_booted": true }, "changed": false } TASK [fedora.linux_system_roles.certificate : Set platform/version specific 
variables] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:40 Saturday 12 July 2025 12:39:57 -0400 (0:00:00.019) 0:00:02.173 ********* skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_8.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_8.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5 Saturday 12 July 2025 12:39:57 -0400 (0:00:00.034) 0:00:02.207 ********* changed: [managed-node2] => { "changed": true, "rc": 0, "results": [ "Installed: python3-pyasn1-0.3.7-6.el8.noarch" ] } lsrpackages: python3-cryptography python3-dbus python3-pyasn1 TASK [fedora.linux_system_roles.certificate : Ensure provider packages are installed] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:15 Saturday 12 July 2025 12:40:01 -0400 (0:00:03.603) 0:00:05.810 ********* changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "rc": 0, "results": [ "Installed: xmlrpc-c-client-1.51.0-9.el8.x86_64", "Installed: xmlrpc-c-1.51.0-9.el8.x86_64", "Installed: certmonger-0.79.17-2.el8.x86_64" ] } lsrpackages: certmonger TASK [fedora.linux_system_roles.certificate : Ensure pre-scripts hooks directory exists] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:25 Saturday 12 July 2025 12:40:05 -0400 (0:00:04.389) 0:00:10.200 ********* changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/certmonger//pre-scripts", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.certificate : Ensure post-scripts hooks directory exists] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:49 Saturday 12 July 2025 12:40:06 -0400 (0:00:00.494) 0:00:10.695 ********* changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/certmonger//post-scripts", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.certificate : Ensure provider service is running] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:76 Saturday 12 July 2025 12:40:06 -0400 (0:00:00.362) 0:00:11.057 ********* changed: [managed-node2] => (item=certmonger) => { 
"__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "enabled": true, "name": "certmonger", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "network.target sysinit.target systemd-journald.socket dbus.socket syslog.target dbus.service basic.target system.slice", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedorahosted.certmonger", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Certificate monitoring and PKI enrollment", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/certmonger (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/certmonger.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "certmonger.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": 
"infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "certmonger.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PIDFile": "/run/certmonger.pid", "PartOf": "dbus.service", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target dbus.socket system.slice", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.certificate : Ensure certificate requests] ***** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:86 Saturday 12 July 2025 12:40:07 -0400 
(0:00:00.943) 0:00:12.001 ********* changed: [managed-node2] => (item={'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}) => { "ansible_loop_var": "item", "changed": true, "item": { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } } MSG: Certificate requested (new). TASK [fedora.linux_system_roles.certificate : Check if test mode is supported] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:138 Saturday 12 July 2025 12:40:08 -0400 (0:00:00.809) 0:00:12.811 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.certificate : Slurp the contents of the files] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:143 Saturday 12 July 2025 12:40:08 -0400 (0:00:00.017) 0:00:12.828 ********* ok: [managed-node2] => (item=['cert', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnakNDQW1xZ0F3SUJBZ0lRRnpjU2I5UlFTVEsvOWRIeDdoYXhiREFOQmdrcWhraUc5dzBCQVFzRkFEQlEKTVNBd0hnWURWUVFEREJkTWIyTmhiQ0JUYVdkdWFXNW5JRUYxZEdodmNtbDBlVEVzTUNvR0ExVUVBd3dqTVRjegpOekV5Tm1ZdFpEUTFNRFE1TXpJdFltWm1OV1F4WmpFdFpXVXhObUl4Tm1Jd0hoY05NalV3TnpFeU1UWTBNREE0CldoY05Nall3TnpFeU1UWTBNREEzV2pBVU1SSXdFQVlEVlFRREV3bHNiMk5oYkdodmMzUXdnZ0VpTUEwR0NTcUcKU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRQ3dNWjJFOTEzbUNnS0k5Y1IySmwxNDVGN0FQN2RONzB2Ngp6aXFWVFg5ZzY4cVR6SHpaWUxNK2YxUzRzK0VlTWIxTmVPRGZYVWVZeFh5NGpQSnhIUW9VQURhNVl1ZHorRnJHCm9RYzZPZHhLMGprbkZaTzR5OWhBcnMrV09TUG1tY3oyNWhENnhVMVF5VE9hK2EzU0Jlcms5SlAyQ3Y4YThvWmgKQzlDZ2IraWV5blpLY2FUU3FqYWF3dWtva0owdkRMTjNtVlcwNFdFeDY1WUhHeVdNdENKWmhjSnVQYmtPNkd5eApEOGg2UlNYYjhON1FYeUZjSEoxd2ovV3hPcG9VeEcwNnRvVjJqVEQ0WExLTlNIN0dHWlRqVmMwUGhHTXdvK0xEClFhWGoxY0ExcFlFb3RoYmlteWZteUZKYUFGSTc3aURaYWxEUGc4M0hRd3MzM0xJb0trQTdBZ01CQUFHamdaTXcKZ1pBd0N3WURWUjBQQkFRREFnV2dNQlFHQTFVZEVRUU5NQXVDQ1d4dlkyRnNhRzl6ZERBZEJnTlZIU1VFRmpBVQpCZ2dyQmdFRkJRY0RBUVlJS3dZQkJRVUhBd0l3REFZRFZSMFRBUUgvQkFJd0FEQWRCZ05WSFE0RUZnUVVOcDV4CmMyNVp6UmduOG5wd3pNbzVDM1VyMjE0d0h3WURWUjBqQkJnd0ZvQVVhRkZZTWJCekJ4N0J6YTMzRjdZUEgrcjgKcFFFd0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFMVGxuWklkNEpzMHFKa3ozYXBzblFrS0hzREUvMkE1TEJKOApkeXpCaUQrSEQzNUxWV1IvV00vZjk0VVdubkRSRTRoVWZYanZ2U0JWRmdrSHdld0p0MzdmK0IvSG1nTzZBQXphCmxGbFpFVUF3RlR5eSszaFJ3V3crTlR3UVhQY2xSaDNwYzFKdExPR2tsa1RLYU1TRE0vTittL1VLWmd0R1hvWkcKU0plc1czTWlhWFBzVFpLTkxoWE1BK0Fkak1wVnRUdzgvajRhVlJDMVBrejBzaHZZR2VXVTZwY1U5emJRdUdXTwpJYjZWTzVpZXljdWE4TDJ4NUVhWEFkTEl3VS9iUnJNcWhMRU1tT2kxanp1MCtmVGluM0hjNUVObWx1cm5YMkZDCm5pWVFxUmtIQi9MdWNWN1VOYVQxa0VIL2FoYkZRRkQrbndOanQ5NDhMV2FnMDlhVEZwVT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", "encoding": "base64", "item": [ "cert", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/certs/quadlet_demo.crt" } ok: [managed-node2] => (item=['key', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": 
"LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2d0lCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktrd2dnU2xBZ0VBQW9JQkFRQ3dNWjJFOTEzbUNnS0kKOWNSMkpsMTQ1RjdBUDdkTjcwdjZ6aXFWVFg5ZzY4cVR6SHpaWUxNK2YxUzRzK0VlTWIxTmVPRGZYVWVZeFh5NApqUEp4SFFvVUFEYTVZdWR6K0ZyR29RYzZPZHhLMGprbkZaTzR5OWhBcnMrV09TUG1tY3oyNWhENnhVMVF5VE9hCithM1NCZXJrOUpQMkN2OGE4b1poQzlDZ2IraWV5blpLY2FUU3FqYWF3dWtva0owdkRMTjNtVlcwNFdFeDY1WUgKR3lXTXRDSlpoY0p1UGJrTzZHeXhEOGg2UlNYYjhON1FYeUZjSEoxd2ovV3hPcG9VeEcwNnRvVjJqVEQ0WExLTgpTSDdHR1pUalZjMFBoR013bytMRFFhWGoxY0ExcFlFb3RoYmlteWZteUZKYUFGSTc3aURaYWxEUGc4M0hRd3MzCjNMSW9La0E3QWdNQkFBRUNnZ0VCQUsvdTJ2c0toaGhOcG5wYjdmQ01Ib200ZjJRNE96RDI0VWR1MWRaL3E5RjcKb0t3MFkvWXJPTkFQRk9uWnM5QVNtcFBoUDZuSjJ0UVJYcGhwSlMwTmNpTEdrOTVKaEJPb3lmdk1qd3IwKzZNNgpoeGNmMUlHU0NqMk1VSHdRVW1qUTZua2ZBWlZscmU0YytncWxoWXRSblZ0QlhQMHh2Y21Lb2VRRmlDeXJrZnY3ClJlcnJCQmRMRGJKN3NvekVXYXdENGxvd2h3K2JLeFV0cDVEWk0ybTVvaEp2cWlWbHMzS1p3bUhOL3RNc1RCRmIKa1lML3lXOW40cXFZUzZDOERvR2xLZEYwbVFEdXliV081S01ycFB2bERyZ0h3Vzg5V2NtbjMwRjRKWU4zUDN3NgpTaXAyeU5kOVZwTHE0a3pBbkt1OGd3NHdERkNlT2JUUTZPcE5XQ2ZaWHdFQ2dZRUEzT2t4eGZVQUVMYW5OQiswClRURlFYelQwMVRYTXdRSUFUeERraDZISittMFl3SkgzYXFEVVg5cDN3UFpEYjFORlY2QnM5Q00yN29CN3kyKzIKWldUbW0zVDlwd3NsMzZWdy9INUYxTEVqSGliVkZnR3hDSzVkcmhUMGpSNHd6UmhlUWxtN2FXaExEZHlGRUx3WgpXY21DdTZyeDVQUFNQOFV6M2VVeVEzeEJoeThDZ1lFQXpDNGJ3bENtSDJRaVVudHdZUlBIQ2VpdjQ0UVVSVjJECm8xcGJaM09RV25OTjJaZ2M1ZzZqRFh0RlorYjd0UXAwajdTU25QT2FZS0E3UXJHbWtsVklMakhDOTc5ZkE1RnEKaTJ6MW5RaUh3cHZlQUpKbVNKU0xaYmRaRmtrWmU5MFZaYmpSYXJML0xhRmFkUm1TYjFLbVRIcUIxT0pwaUswRQpxNmJUdTdPYUZMVUNnWUVBMjZCci82Y292WFVhbnRPMmFkaFJVNVpPbzNxdGFtWVc4SVNXNzlHR0FSZGR1SFptClNQaEVyRmVESDdDMk1vUFRPTjhoODBiZlloY1MvSnEvOStaczdybE5SVG5zR1lGZDUybVMrM1p3dDVWYjNGa3EKTEx0T0g3aHR1YTcrVGFtZ2JNN1JhR1pyMkFnWnN4MlBCcGsxQkdkYnZvZzV5M1Mvc0NnaDVMNHdWN2tDZ1lFQQpoT2RpZEVxUXg1QWpjek1jUkdSZVE4bnRhNWdXcGN3b0dwVi9OOGlXWnZuMDY3VkM0bEVXdTNqSkJjZnBnYitMCmZkc1pNQWExTHo0OU5MMTBZbjRXbk5NUlNMaU52VUhUYTJ3Q09EUk1RbFc0M1FCdWljSC9OcmR3eXJ2TVZiN2oKNWJHMnh0QTZuOVlPam5pVnc5YTFuc1NEYkRId1plSFJUM1lwRGwzcTVwMENnWUFiYVE3WnJaSEozN2FoWCt0bQpRalRVV3VkcUg2ZXJWQ2tBeGZRNWNXd1U3R2NGS1I1YlAvSjlDVEsxQ0p0U0c1UUlkOXczZDRoM1hWOGdFbWZJCk9Id1hSc25QbWJLMTRDcHVpT1FqTmxkM0IyNHBoSDJBeGhQWjBibzVMNVNycm1yOWFRYXg1YWJDQ2lrOTY2WnkKTnVjeW1lb1I5QmRwdGtMMVVtUUwxVFhqMVE9PQotLS0tLUVORCBQUklWQVRFIEtFWS0tLS0tCg==", "encoding": "base64", "item": [ "key", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/private/quadlet_demo.key" } ok: [managed-node2] => (item=['ca', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": 
"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnakNDQW1xZ0F3SUJBZ0lRRnpjU2I5UlFTVEsvOWRIeDdoYXhiREFOQmdrcWhraUc5dzBCQVFzRkFEQlEKTVNBd0hnWURWUVFEREJkTWIyTmhiQ0JUYVdkdWFXNW5JRUYxZEdodmNtbDBlVEVzTUNvR0ExVUVBd3dqTVRjegpOekV5Tm1ZdFpEUTFNRFE1TXpJdFltWm1OV1F4WmpFdFpXVXhObUl4Tm1Jd0hoY05NalV3TnpFeU1UWTBNREE0CldoY05Nall3TnpFeU1UWTBNREEzV2pBVU1SSXdFQVlEVlFRREV3bHNiMk5oYkdodmMzUXdnZ0VpTUEwR0NTcUcKU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRQ3dNWjJFOTEzbUNnS0k5Y1IySmwxNDVGN0FQN2RONzB2Ngp6aXFWVFg5ZzY4cVR6SHpaWUxNK2YxUzRzK0VlTWIxTmVPRGZYVWVZeFh5NGpQSnhIUW9VQURhNVl1ZHorRnJHCm9RYzZPZHhLMGprbkZaTzR5OWhBcnMrV09TUG1tY3oyNWhENnhVMVF5VE9hK2EzU0Jlcms5SlAyQ3Y4YThvWmgKQzlDZ2IraWV5blpLY2FUU3FqYWF3dWtva0owdkRMTjNtVlcwNFdFeDY1WUhHeVdNdENKWmhjSnVQYmtPNkd5eApEOGg2UlNYYjhON1FYeUZjSEoxd2ovV3hPcG9VeEcwNnRvVjJqVEQ0WExLTlNIN0dHWlRqVmMwUGhHTXdvK0xEClFhWGoxY0ExcFlFb3RoYmlteWZteUZKYUFGSTc3aURaYWxEUGc4M0hRd3MzM0xJb0trQTdBZ01CQUFHamdaTXcKZ1pBd0N3WURWUjBQQkFRREFnV2dNQlFHQTFVZEVRUU5NQXVDQ1d4dlkyRnNhRzl6ZERBZEJnTlZIU1VFRmpBVQpCZ2dyQmdFRkJRY0RBUVlJS3dZQkJRVUhBd0l3REFZRFZSMFRBUUgvQkFJd0FEQWRCZ05WSFE0RUZnUVVOcDV4CmMyNVp6UmduOG5wd3pNbzVDM1VyMjE0d0h3WURWUjBqQkJnd0ZvQVVhRkZZTWJCekJ4N0J6YTMzRjdZUEgrcjgKcFFFd0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFMVGxuWklkNEpzMHFKa3ozYXBzblFrS0hzREUvMkE1TEJKOApkeXpCaUQrSEQzNUxWV1IvV00vZjk0VVdubkRSRTRoVWZYanZ2U0JWRmdrSHdld0p0MzdmK0IvSG1nTzZBQXphCmxGbFpFVUF3RlR5eSszaFJ3V3crTlR3UVhQY2xSaDNwYzFKdExPR2tsa1RLYU1TRE0vTittL1VLWmd0R1hvWkcKU0plc1czTWlhWFBzVFpLTkxoWE1BK0Fkak1wVnRUdzgvajRhVlJDMVBrejBzaHZZR2VXVTZwY1U5emJRdUdXTwpJYjZWTzVpZXljdWE4TDJ4NUVhWEFkTEl3VS9iUnJNcWhMRU1tT2kxanp1MCtmVGluM0hjNUVObWx1cm5YMkZDCm5pWVFxUmtIQi9MdWNWN1VOYVQxa0VIL2FoYkZRRkQrbndOanQ5NDhMV2FnMDlhVEZwVT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", "encoding": "base64", "item": [ "ca", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/certs/quadlet_demo.crt" } TASK [fedora.linux_system_roles.certificate : Reset certificate_test_certs] **** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:151 Saturday 12 July 2025 12:40:09 -0400 (0:00:01.101) 0:00:13.930 ********* ok: [managed-node2] => { "ansible_facts": { "certificate_test_certs": {} }, "changed": false } TASK [fedora.linux_system_roles.certificate : Create return data] ************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:155 Saturday 12 July 2025 12:40:09 -0400 (0:00:00.019) 0:00:13.950 ********* ok: [managed-node2] => (item=quadlet_demo) => { "ansible_facts": { "certificate_test_certs": { "quadlet_demo": { "ca": "/etc/pki/tls/certs/quadlet_demo.crt", "ca_content": "-----BEGIN 
CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQFzcSb9RQSTK/9dHx7haxbDANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMTcz\nNzEyNmYtZDQ1MDQ5MzItYmZmNWQxZjEtZWUxNmIxNmIwHhcNMjUwNzEyMTY0MDA4\nWhcNMjYwNzEyMTY0MDA3WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCwMZ2E913mCgKI9cR2Jl145F7AP7dN70v6\nziqVTX9g68qTzHzZYLM+f1S4s+EeMb1NeODfXUeYxXy4jPJxHQoUADa5Yudz+FrG\noQc6OdxK0jknFZO4y9hArs+WOSPmmcz25hD6xU1QyTOa+a3SBerk9JP2Cv8a8oZh\nC9Cgb+ieynZKcaTSqjaawukokJ0vDLN3mVW04WEx65YHGyWMtCJZhcJuPbkO6Gyx\nD8h6RSXb8N7QXyFcHJ1wj/WxOpoUxG06toV2jTD4XLKNSH7GGZTjVc0PhGMwo+LD\nQaXj1cA1pYEothbimyfmyFJaAFI77iDZalDPg83HQws33LIoKkA7AgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUNp5x\nc25ZzRgn8npwzMo5C3Ur214wHwYDVR0jBBgwFoAUaFFYMbBzBx7Bza33F7YPH+r8\npQEwDQYJKoZIhvcNAQELBQADggEBALTlnZId4Js0qJkz3apsnQkKHsDE/2A5LBJ8\ndyzBiD+HD35LVWR/WM/f94UWnnDRE4hUfXjvvSBVFgkHwewJt37f+B/HmgO6AAza\nlFlZEUAwFTyy+3hRwWw+NTwQXPclRh3pc1JtLOGklkTKaMSDM/N+m/UKZgtGXoZG\nSJesW3MiaXPsTZKNLhXMA+AdjMpVtTw8/j4aVRC1Pkz0shvYGeWU6pcU9zbQuGWO\nIb6VO5ieycua8L2x5EaXAdLIwU/bRrMqhLEMmOi1jzu0+fTin3Hc5ENmlurnX2FC\nniYQqRkHB/LucV7UNaT1kEH/ahbFQFD+nwNjt948LWag09aTFpU=\n-----END CERTIFICATE-----\n", "cert": "/etc/pki/tls/certs/quadlet_demo.crt", "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQFzcSb9RQSTK/9dHx7haxbDANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMTcz\nNzEyNmYtZDQ1MDQ5MzItYmZmNWQxZjEtZWUxNmIxNmIwHhcNMjUwNzEyMTY0MDA4\nWhcNMjYwNzEyMTY0MDA3WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCwMZ2E913mCgKI9cR2Jl145F7AP7dN70v6\nziqVTX9g68qTzHzZYLM+f1S4s+EeMb1NeODfXUeYxXy4jPJxHQoUADa5Yudz+FrG\noQc6OdxK0jknFZO4y9hArs+WOSPmmcz25hD6xU1QyTOa+a3SBerk9JP2Cv8a8oZh\nC9Cgb+ieynZKcaTSqjaawukokJ0vDLN3mVW04WEx65YHGyWMtCJZhcJuPbkO6Gyx\nD8h6RSXb8N7QXyFcHJ1wj/WxOpoUxG06toV2jTD4XLKNSH7GGZTjVc0PhGMwo+LD\nQaXj1cA1pYEothbimyfmyFJaAFI77iDZalDPg83HQws33LIoKkA7AgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUNp5x\nc25ZzRgn8npwzMo5C3Ur214wHwYDVR0jBBgwFoAUaFFYMbBzBx7Bza33F7YPH+r8\npQEwDQYJKoZIhvcNAQELBQADggEBALTlnZId4Js0qJkz3apsnQkKHsDE/2A5LBJ8\ndyzBiD+HD35LVWR/WM/f94UWnnDRE4hUfXjvvSBVFgkHwewJt37f+B/HmgO6AAza\nlFlZEUAwFTyy+3hRwWw+NTwQXPclRh3pc1JtLOGklkTKaMSDM/N+m/UKZgtGXoZG\nSJesW3MiaXPsTZKNLhXMA+AdjMpVtTw8/j4aVRC1Pkz0shvYGeWU6pcU9zbQuGWO\nIb6VO5ieycua8L2x5EaXAdLIwU/bRrMqhLEMmOi1jzu0+fTin3Hc5ENmlurnX2FC\nniYQqRkHB/LucV7UNaT1kEH/ahbFQFD+nwNjt948LWag09aTFpU=\n-----END CERTIFICATE-----\n", "key": "/etc/pki/tls/private/quadlet_demo.key", "key_content": "-----BEGIN PRIVATE 
KEY-----\nMIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQCwMZ2E913mCgKI\n9cR2Jl145F7AP7dN70v6ziqVTX9g68qTzHzZYLM+f1S4s+EeMb1NeODfXUeYxXy4\njPJxHQoUADa5Yudz+FrGoQc6OdxK0jknFZO4y9hArs+WOSPmmcz25hD6xU1QyTOa\n+a3SBerk9JP2Cv8a8oZhC9Cgb+ieynZKcaTSqjaawukokJ0vDLN3mVW04WEx65YH\nGyWMtCJZhcJuPbkO6GyxD8h6RSXb8N7QXyFcHJ1wj/WxOpoUxG06toV2jTD4XLKN\nSH7GGZTjVc0PhGMwo+LDQaXj1cA1pYEothbimyfmyFJaAFI77iDZalDPg83HQws3\n3LIoKkA7AgMBAAECggEBAK/u2vsKhhhNpnpb7fCMHom4f2Q4OzD24Udu1dZ/q9F7\noKw0Y/YrONAPFOnZs9ASmpPhP6nJ2tQRXphpJS0NciLGk95JhBOoyfvMjwr0+6M6\nhxcf1IGSCj2MUHwQUmjQ6nkfAZVlre4c+gqlhYtRnVtBXP0xvcmKoeQFiCyrkfv7\nRerrBBdLDbJ7sozEWawD4lowhw+bKxUtp5DZM2m5ohJvqiVls3KZwmHN/tMsTBFb\nkYL/yW9n4qqYS6C8DoGlKdF0mQDuybWO5KMrpPvlDrgHwW89Wcmn30F4JYN3P3w6\nSip2yNd9VpLq4kzAnKu8gw4wDFCeObTQ6OpNWCfZXwECgYEA3OkxxfUAELanNB+0\nTTFQXzT01TXMwQIATxDkh6HJ+m0YwJH3aqDUX9p3wPZDb1NFV6Bs9CM27oB7y2+2\nZWTmm3T9pwsl36Vw/H5F1LEjHibVFgGxCK5drhT0jR4wzRheQlm7aWhLDdyFELwZ\nWcmCu6rx5PPSP8Uz3eUyQ3xBhy8CgYEAzC4bwlCmH2QiUntwYRPHCeiv44QURV2D\no1pbZ3OQWnNN2Zgc5g6jDXtFZ+b7tQp0j7SSnPOaYKA7QrGmklVILjHC979fA5Fq\ni2z1nQiHwpveAJJmSJSLZbdZFkkZe90VZbjRarL/LaFadRmSb1KmTHqB1OJpiK0E\nq6bTu7OaFLUCgYEA26Br/6covXUantO2adhRU5ZOo3qtamYW8ISW79GGARdduHZm\nSPhErFeDH7C2MoPTON8h80bfYhcS/Jq/9+Zs7rlNRTnsGYFd52mS+3Zwt5Vb3Fkq\nLLtOH7htua7+TamgbM7RaGZr2AgZsx2PBpk1BGdbvog5y3S/sCgh5L4wV7kCgYEA\nhOdidEqQx5AjczMcRGReQ8nta5gWpcwoGpV/N8iWZvn067VC4lEWu3jJBcfpgb+L\nfdsZMAa1Lz49NL10Yn4WnNMRSLiNvUHTa2wCODRMQlW43QBuicH/NrdwyrvMVb7j\n5bG2xtA6n9YOjniVw9a1nsSDbDHwZeHRT3YpDl3q5p0CgYAbaQ7ZrZHJ37ahX+tm\nQjTUWudqH6erVCkAxfQ5cWwU7GcFKR5bP/J9CTK1CJtSG5QId9w3d4h3XV8gEmfI\nOHwXRsnPmbK14CpuiOQjNld3B24phH2AxhPZ0bo5L5Srrmr9aQax5abCCik966Zy\nNucymeoR9BdptkL1UmQL1TXj1Q==\n-----END PRIVATE KEY-----\n" } } }, "ansible_loop_var": "cert_name", "cert_name": "quadlet_demo", "changed": false } TASK [fedora.linux_system_roles.certificate : Stop tracking certificates] ****** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:169 Saturday 12 July 2025 12:40:09 -0400 (0:00:00.045) 0:00:13.995 ********* ok: [managed-node2] => (item={'cert': '/etc/pki/tls/certs/quadlet_demo.crt', 'key': '/etc/pki/tls/private/quadlet_demo.key', 'ca': '/etc/pki/tls/certs/quadlet_demo.crt', 'cert_content': '-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQFzcSb9RQSTK/9dHx7haxbDANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMTcz\nNzEyNmYtZDQ1MDQ5MzItYmZmNWQxZjEtZWUxNmIxNmIwHhcNMjUwNzEyMTY0MDA4\nWhcNMjYwNzEyMTY0MDA3WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCwMZ2E913mCgKI9cR2Jl145F7AP7dN70v6\nziqVTX9g68qTzHzZYLM+f1S4s+EeMb1NeODfXUeYxXy4jPJxHQoUADa5Yudz+FrG\noQc6OdxK0jknFZO4y9hArs+WOSPmmcz25hD6xU1QyTOa+a3SBerk9JP2Cv8a8oZh\nC9Cgb+ieynZKcaTSqjaawukokJ0vDLN3mVW04WEx65YHGyWMtCJZhcJuPbkO6Gyx\nD8h6RSXb8N7QXyFcHJ1wj/WxOpoUxG06toV2jTD4XLKNSH7GGZTjVc0PhGMwo+LD\nQaXj1cA1pYEothbimyfmyFJaAFI77iDZalDPg83HQws33LIoKkA7AgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUNp5x\nc25ZzRgn8npwzMo5C3Ur214wHwYDVR0jBBgwFoAUaFFYMbBzBx7Bza33F7YPH+r8\npQEwDQYJKoZIhvcNAQELBQADggEBALTlnZId4Js0qJkz3apsnQkKHsDE/2A5LBJ8\ndyzBiD+HD35LVWR/WM/f94UWnnDRE4hUfXjvvSBVFgkHwewJt37f+B/HmgO6AAza\nlFlZEUAwFTyy+3hRwWw+NTwQXPclRh3pc1JtLOGklkTKaMSDM/N+m/UKZgtGXoZG\nSJesW3MiaXPsTZKNLhXMA+AdjMpVtTw8/j4aVRC1Pkz0shvYGeWU6pcU9zbQuGWO\nIb6VO5ieycua8L2x5EaXAdLIwU/bRrMqhLEMmOi1jzu0+fTin3Hc5ENmlurnX2FC\nniYQqRkHB/LucV7UNaT1kEH/ahbFQFD+nwNjt948LWag09aTFpU=\n-----END 
CERTIFICATE-----\n', 'key_content': '-----BEGIN PRIVATE KEY-----\nMIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQCwMZ2E913mCgKI\n9cR2Jl145F7AP7dN70v6ziqVTX9g68qTzHzZYLM+f1S4s+EeMb1NeODfXUeYxXy4\njPJxHQoUADa5Yudz+FrGoQc6OdxK0jknFZO4y9hArs+WOSPmmcz25hD6xU1QyTOa\n+a3SBerk9JP2Cv8a8oZhC9Cgb+ieynZKcaTSqjaawukokJ0vDLN3mVW04WEx65YH\nGyWMtCJZhcJuPbkO6GyxD8h6RSXb8N7QXyFcHJ1wj/WxOpoUxG06toV2jTD4XLKN\nSH7GGZTjVc0PhGMwo+LDQaXj1cA1pYEothbimyfmyFJaAFI77iDZalDPg83HQws3\n3LIoKkA7AgMBAAECggEBAK/u2vsKhhhNpnpb7fCMHom4f2Q4OzD24Udu1dZ/q9F7\noKw0Y/YrONAPFOnZs9ASmpPhP6nJ2tQRXphpJS0NciLGk95JhBOoyfvMjwr0+6M6\nhxcf1IGSCj2MUHwQUmjQ6nkfAZVlre4c+gqlhYtRnVtBXP0xvcmKoeQFiCyrkfv7\nRerrBBdLDbJ7sozEWawD4lowhw+bKxUtp5DZM2m5ohJvqiVls3KZwmHN/tMsTBFb\nkYL/yW9n4qqYS6C8DoGlKdF0mQDuybWO5KMrpPvlDrgHwW89Wcmn30F4JYN3P3w6\nSip2yNd9VpLq4kzAnKu8gw4wDFCeObTQ6OpNWCfZXwECgYEA3OkxxfUAELanNB+0\nTTFQXzT01TXMwQIATxDkh6HJ+m0YwJH3aqDUX9p3wPZDb1NFV6Bs9CM27oB7y2+2\nZWTmm3T9pwsl36Vw/H5F1LEjHibVFgGxCK5drhT0jR4wzRheQlm7aWhLDdyFELwZ\nWcmCu6rx5PPSP8Uz3eUyQ3xBhy8CgYEAzC4bwlCmH2QiUntwYRPHCeiv44QURV2D\no1pbZ3OQWnNN2Zgc5g6jDXtFZ+b7tQp0j7SSnPOaYKA7QrGmklVILjHC979fA5Fq\ni2z1nQiHwpveAJJmSJSLZbdZFkkZe90VZbjRarL/LaFadRmSb1KmTHqB1OJpiK0E\nq6bTu7OaFLUCgYEA26Br/6covXUantO2adhRU5ZOo3qtamYW8ISW79GGARdduHZm\nSPhErFeDH7C2MoPTON8h80bfYhcS/Jq/9+Zs7rlNRTnsGYFd52mS+3Zwt5Vb3Fkq\nLLtOH7htua7+TamgbM7RaGZr2AgZsx2PBpk1BGdbvog5y3S/sCgh5L4wV7kCgYEA\nhOdidEqQx5AjczMcRGReQ8nta5gWpcwoGpV/N8iWZvn067VC4lEWu3jJBcfpgb+L\nfdsZMAa1Lz49NL10Yn4WnNMRSLiNvUHTa2wCODRMQlW43QBuicH/NrdwyrvMVb7j\n5bG2xtA6n9YOjniVw9a1nsSDbDHwZeHRT3YpDl3q5p0CgYAbaQ7ZrZHJ37ahX+tm\nQjTUWudqH6erVCkAxfQ5cWwU7GcFKR5bP/J9CTK1CJtSG5QId9w3d4h3XV8gEmfI\nOHwXRsnPmbK14CpuiOQjNld3B24phH2AxhPZ0bo5L5Srrmr9aQax5abCCik966Zy\nNucymeoR9BdptkL1UmQL1TXj1Q==\n-----END PRIVATE KEY-----\n', 'ca_content': '-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQFzcSb9RQSTK/9dHx7haxbDANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMTcz\nNzEyNmYtZDQ1MDQ5MzItYmZmNWQxZjEtZWUxNmIxNmIwHhcNMjUwNzEyMTY0MDA4\nWhcNMjYwNzEyMTY0MDA3WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCwMZ2E913mCgKI9cR2Jl145F7AP7dN70v6\nziqVTX9g68qTzHzZYLM+f1S4s+EeMb1NeODfXUeYxXy4jPJxHQoUADa5Yudz+FrG\noQc6OdxK0jknFZO4y9hArs+WOSPmmcz25hD6xU1QyTOa+a3SBerk9JP2Cv8a8oZh\nC9Cgb+ieynZKcaTSqjaawukokJ0vDLN3mVW04WEx65YHGyWMtCJZhcJuPbkO6Gyx\nD8h6RSXb8N7QXyFcHJ1wj/WxOpoUxG06toV2jTD4XLKNSH7GGZTjVc0PhGMwo+LD\nQaXj1cA1pYEothbimyfmyFJaAFI77iDZalDPg83HQws33LIoKkA7AgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUNp5x\nc25ZzRgn8npwzMo5C3Ur214wHwYDVR0jBBgwFoAUaFFYMbBzBx7Bza33F7YPH+r8\npQEwDQYJKoZIhvcNAQELBQADggEBALTlnZId4Js0qJkz3apsnQkKHsDE/2A5LBJ8\ndyzBiD+HD35LVWR/WM/f94UWnnDRE4hUfXjvvSBVFgkHwewJt37f+B/HmgO6AAza\nlFlZEUAwFTyy+3hRwWw+NTwQXPclRh3pc1JtLOGklkTKaMSDM/N+m/UKZgtGXoZG\nSJesW3MiaXPsTZKNLhXMA+AdjMpVtTw8/j4aVRC1Pkz0shvYGeWU6pcU9zbQuGWO\nIb6VO5ieycua8L2x5EaXAdLIwU/bRrMqhLEMmOi1jzu0+fTin3Hc5ENmlurnX2FC\nniYQqRkHB/LucV7UNaT1kEH/ahbFQFD+nwNjt948LWag09aTFpU=\n-----END CERTIFICATE-----\n'}) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "getcert", "stop-tracking", "-f", "/etc/pki/tls/certs/quadlet_demo.crt" ], "delta": "0:00:00.031596", "end": "2025-07-12 12:40:10.043540", "item": { "ca": "/etc/pki/tls/certs/quadlet_demo.crt", "ca_content": "-----BEGIN 
CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQFzcSb9RQSTK/9dHx7haxbDANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMTcz\nNzEyNmYtZDQ1MDQ5MzItYmZmNWQxZjEtZWUxNmIxNmIwHhcNMjUwNzEyMTY0MDA4\nWhcNMjYwNzEyMTY0MDA3WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCwMZ2E913mCgKI9cR2Jl145F7AP7dN70v6\nziqVTX9g68qTzHzZYLM+f1S4s+EeMb1NeODfXUeYxXy4jPJxHQoUADa5Yudz+FrG\noQc6OdxK0jknFZO4y9hArs+WOSPmmcz25hD6xU1QyTOa+a3SBerk9JP2Cv8a8oZh\nC9Cgb+ieynZKcaTSqjaawukokJ0vDLN3mVW04WEx65YHGyWMtCJZhcJuPbkO6Gyx\nD8h6RSXb8N7QXyFcHJ1wj/WxOpoUxG06toV2jTD4XLKNSH7GGZTjVc0PhGMwo+LD\nQaXj1cA1pYEothbimyfmyFJaAFI77iDZalDPg83HQws33LIoKkA7AgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUNp5x\nc25ZzRgn8npwzMo5C3Ur214wHwYDVR0jBBgwFoAUaFFYMbBzBx7Bza33F7YPH+r8\npQEwDQYJKoZIhvcNAQELBQADggEBALTlnZId4Js0qJkz3apsnQkKHsDE/2A5LBJ8\ndyzBiD+HD35LVWR/WM/f94UWnnDRE4hUfXjvvSBVFgkHwewJt37f+B/HmgO6AAza\nlFlZEUAwFTyy+3hRwWw+NTwQXPclRh3pc1JtLOGklkTKaMSDM/N+m/UKZgtGXoZG\nSJesW3MiaXPsTZKNLhXMA+AdjMpVtTw8/j4aVRC1Pkz0shvYGeWU6pcU9zbQuGWO\nIb6VO5ieycua8L2x5EaXAdLIwU/bRrMqhLEMmOi1jzu0+fTin3Hc5ENmlurnX2FC\nniYQqRkHB/LucV7UNaT1kEH/ahbFQFD+nwNjt948LWag09aTFpU=\n-----END CERTIFICATE-----\n", "cert": "/etc/pki/tls/certs/quadlet_demo.crt", "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQFzcSb9RQSTK/9dHx7haxbDANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMTcz\nNzEyNmYtZDQ1MDQ5MzItYmZmNWQxZjEtZWUxNmIxNmIwHhcNMjUwNzEyMTY0MDA4\nWhcNMjYwNzEyMTY0MDA3WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCwMZ2E913mCgKI9cR2Jl145F7AP7dN70v6\nziqVTX9g68qTzHzZYLM+f1S4s+EeMb1NeODfXUeYxXy4jPJxHQoUADa5Yudz+FrG\noQc6OdxK0jknFZO4y9hArs+WOSPmmcz25hD6xU1QyTOa+a3SBerk9JP2Cv8a8oZh\nC9Cgb+ieynZKcaTSqjaawukokJ0vDLN3mVW04WEx65YHGyWMtCJZhcJuPbkO6Gyx\nD8h6RSXb8N7QXyFcHJ1wj/WxOpoUxG06toV2jTD4XLKNSH7GGZTjVc0PhGMwo+LD\nQaXj1cA1pYEothbimyfmyFJaAFI77iDZalDPg83HQws33LIoKkA7AgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUNp5x\nc25ZzRgn8npwzMo5C3Ur214wHwYDVR0jBBgwFoAUaFFYMbBzBx7Bza33F7YPH+r8\npQEwDQYJKoZIhvcNAQELBQADggEBALTlnZId4Js0qJkz3apsnQkKHsDE/2A5LBJ8\ndyzBiD+HD35LVWR/WM/f94UWnnDRE4hUfXjvvSBVFgkHwewJt37f+B/HmgO6AAza\nlFlZEUAwFTyy+3hRwWw+NTwQXPclRh3pc1JtLOGklkTKaMSDM/N+m/UKZgtGXoZG\nSJesW3MiaXPsTZKNLhXMA+AdjMpVtTw8/j4aVRC1Pkz0shvYGeWU6pcU9zbQuGWO\nIb6VO5ieycua8L2x5EaXAdLIwU/bRrMqhLEMmOi1jzu0+fTin3Hc5ENmlurnX2FC\nniYQqRkHB/LucV7UNaT1kEH/ahbFQFD+nwNjt948LWag09aTFpU=\n-----END CERTIFICATE-----\n", "key": "/etc/pki/tls/private/quadlet_demo.key", "key_content": "-----BEGIN PRIVATE 
KEY-----\nMIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQCwMZ2E913mCgKI\n9cR2Jl145F7AP7dN70v6ziqVTX9g68qTzHzZYLM+f1S4s+EeMb1NeODfXUeYxXy4\njPJxHQoUADa5Yudz+FrGoQc6OdxK0jknFZO4y9hArs+WOSPmmcz25hD6xU1QyTOa\n+a3SBerk9JP2Cv8a8oZhC9Cgb+ieynZKcaTSqjaawukokJ0vDLN3mVW04WEx65YH\nGyWMtCJZhcJuPbkO6GyxD8h6RSXb8N7QXyFcHJ1wj/WxOpoUxG06toV2jTD4XLKN\nSH7GGZTjVc0PhGMwo+LDQaXj1cA1pYEothbimyfmyFJaAFI77iDZalDPg83HQws3\n3LIoKkA7AgMBAAECggEBAK/u2vsKhhhNpnpb7fCMHom4f2Q4OzD24Udu1dZ/q9F7\noKw0Y/YrONAPFOnZs9ASmpPhP6nJ2tQRXphpJS0NciLGk95JhBOoyfvMjwr0+6M6\nhxcf1IGSCj2MUHwQUmjQ6nkfAZVlre4c+gqlhYtRnVtBXP0xvcmKoeQFiCyrkfv7\nRerrBBdLDbJ7sozEWawD4lowhw+bKxUtp5DZM2m5ohJvqiVls3KZwmHN/tMsTBFb\nkYL/yW9n4qqYS6C8DoGlKdF0mQDuybWO5KMrpPvlDrgHwW89Wcmn30F4JYN3P3w6\nSip2yNd9VpLq4kzAnKu8gw4wDFCeObTQ6OpNWCfZXwECgYEA3OkxxfUAELanNB+0\nTTFQXzT01TXMwQIATxDkh6HJ+m0YwJH3aqDUX9p3wPZDb1NFV6Bs9CM27oB7y2+2\nZWTmm3T9pwsl36Vw/H5F1LEjHibVFgGxCK5drhT0jR4wzRheQlm7aWhLDdyFELwZ\nWcmCu6rx5PPSP8Uz3eUyQ3xBhy8CgYEAzC4bwlCmH2QiUntwYRPHCeiv44QURV2D\no1pbZ3OQWnNN2Zgc5g6jDXtFZ+b7tQp0j7SSnPOaYKA7QrGmklVILjHC979fA5Fq\ni2z1nQiHwpveAJJmSJSLZbdZFkkZe90VZbjRarL/LaFadRmSb1KmTHqB1OJpiK0E\nq6bTu7OaFLUCgYEA26Br/6covXUantO2adhRU5ZOo3qtamYW8ISW79GGARdduHZm\nSPhErFeDH7C2MoPTON8h80bfYhcS/Jq/9+Zs7rlNRTnsGYFd52mS+3Zwt5Vb3Fkq\nLLtOH7htua7+TamgbM7RaGZr2AgZsx2PBpk1BGdbvog5y3S/sCgh5L4wV7kCgYEA\nhOdidEqQx5AjczMcRGReQ8nta5gWpcwoGpV/N8iWZvn067VC4lEWu3jJBcfpgb+L\nfdsZMAa1Lz49NL10Yn4WnNMRSLiNvUHTa2wCODRMQlW43QBuicH/NrdwyrvMVb7j\n5bG2xtA6n9YOjniVw9a1nsSDbDHwZeHRT3YpDl3q5p0CgYAbaQ7ZrZHJ37ahX+tm\nQjTUWudqH6erVCkAxfQ5cWwU7GcFKR5bP/J9CTK1CJtSG5QId9w3d4h3XV8gEmfI\nOHwXRsnPmbK14CpuiOQjNld3B24phH2AxhPZ0bo5L5Srrmr9aQax5abCCik966Zy\nNucymeoR9BdptkL1UmQL1TXj1Q==\n-----END PRIVATE KEY-----\n" }, "rc": 0, "start": "2025-07-12 12:40:10.011944" } STDOUT: Request "20250712164008" removed. 
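For context on the certificate tasks above: the whole sequence is driven by a single certificate_requests entry, visible in the loop items ({'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}). A minimal sketch of an equivalent play, reconstructed from that loop data (the actual play lives at tests_quadlet_demo.yml:51 and may differ in detail):

    - name: Generate certificates
      hosts: all
      vars:
        certificate_requests:
          - name: quadlet_demo     # yields /etc/pki/tls/certs/quadlet_demo.crt
            dns:                   # and /etc/pki/tls/private/quadlet_demo.key
              - localhost
            ca: self-sign          # certmonger's local self-signing CA
      roles:
        - fedora.linux_system_roles.certificate

The role installs and starts certmonger, requests the certificate, and the test then slurps the PEM contents into facts and stops certmonger tracking (getcert stop-tracking, above) so the files can be removed before the podman role runs.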
TASK [fedora.linux_system_roles.certificate : Remove files] ********************
task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:174
Saturday 12 July 2025 12:40:10 -0400 (0:00:00.376) 0:00:14.372 *********
changed: [managed-node2] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => {
    "ansible_loop_var": "item",
    "changed": true,
    "item": "/etc/pki/tls/certs/quadlet_demo.crt",
    "path": "/etc/pki/tls/certs/quadlet_demo.crt",
    "state": "absent"
}
changed: [managed-node2] => (item=/etc/pki/tls/private/quadlet_demo.key) => {
    "ansible_loop_var": "item",
    "changed": true,
    "item": "/etc/pki/tls/private/quadlet_demo.key",
    "path": "/etc/pki/tls/private/quadlet_demo.key",
    "state": "absent"
}
ok: [managed-node2] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "/etc/pki/tls/certs/quadlet_demo.crt",
    "path": "/etc/pki/tls/certs/quadlet_demo.crt",
    "state": "absent"
}

TASK [Run the role] ************************************************************
task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:62
Saturday 12 July 2025 12:40:11 -0400 (0:00:01.084) 0:00:15.457 *********

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Saturday 12 July 2025 12:40:11 -0400 (0:00:00.133) 0:00:15.590 *********
included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Saturday 12 July 2025 12:40:11 -0400 (0:00:00.049) 0:00:15.640 *********
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Saturday 12 July 2025 12:40:11 -0400 (0:00:00.025) 0:00:15.665 *********
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Saturday 12 July 2025 12:40:11 -0400 (0:00:00.373) 0:00:16.039 *********
ok: [managed-node2] => {
    "ansible_facts": {
        "__podman_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Saturday 12 July 2025 12:40:11 -0400 (0:00:00.029) 0:00:16.069 *********
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Saturday 12 July 2025 12:40:12 -0400 (0:00:00.375) 0:00:16.444 *********
ok: [managed-node2] => {
    "ansible_facts": {
        "__podman_is_transactional": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path:
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 12 July 2025 12:40:12 -0400 (0:00:00.026) 0:00:16.471 ********* ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 12 July 2025 12:40:12 -0400 (0:00:00.067) 0:00:16.539 ********* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 12 July 2025 12:40:14 -0400 (0:00:01.830) 0:00:18.370 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 12 July 2025 12:40:14 -0400 (0:00:00.041) 0:00:18.412 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 12 July 2025 12:40:14 -0400 (0:00:00.047) 0:00:18.459 ********* skipping: [managed-node2] => {} TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 12 July 2025 12:40:14 -0400 (0:00:00.035) 0:00:18.495 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 12 July 2025 12:40:14 -0400 (0:00:00.028) 0:00:18.524 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" 
} TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 12 July 2025 12:40:14 -0400 (0:00:00.030) 0:00:18.555 ********* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.031872", "end": "2025-07-12 12:40:14.605789", "rc": 0, "start": "2025-07-12 12:40:14.573917" } STDOUT: podman version 4.9.4-dev TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 12 July 2025 12:40:14 -0400 (0:00:00.392) 0:00:18.947 ********* ok: [managed-node2] => { "ansible_facts": { "podman_version": "4.9.4-dev" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 12 July 2025 12:40:14 -0400 (0:00:00.036) 0:00:18.984 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 12 July 2025 12:40:14 -0400 (0:00:00.029) 0:00:19.014 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 12 July 2025 12:40:14 -0400 (0:00:00.126) 0:00:19.140 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 12 July 2025 12:40:14 -0400 (0:00:00.101) 0:00:19.242 ********* included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 12 July 2025 12:40:15 -0400 (0:00:00.095) 0:00:19.338 ********* ok: [managed-node2] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "root", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 12 July 2025 12:40:15 -0400 (0:00:00.530) 0:00:19.868 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: 
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 12 July 2025 12:40:15 -0400 (0:00:00.061) 0:00:19.930 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 12 July 2025 12:40:15 -0400 (0:00:00.065) 0:00:19.995 ********* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1752338066.7763715, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1752338037.94126, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "481438935", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 12 July 2025 12:40:16 -0400 (0:00:00.391) 0:00:20.387 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 12 July 2025 12:40:16 -0400 (0:00:00.042) 0:00:20.429 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 12 July 2025 12:40:16 -0400 (0:00:00.038) 0:00:20.467 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 12 July 2025 12:40:16 -0400 (0:00:00.040) 0:00:20.508 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 12 July 2025 12:40:16 -0400 (0:00:00.038) 0:00:20.546 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 12 July 2025 12:40:16 -0400 (0:00:00.035) 
0:00:20.581 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 12 July 2025 12:40:16 -0400 (0:00:00.031) 0:00:20.613 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 12 July 2025 12:40:16 -0400 (0:00:00.031) 0:00:20.644 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 12 July 2025 12:40:16 -0400 (0:00:00.031) 0:00:20.675 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Saturday 12 July 2025 12:40:16 -0400 (0:00:00.078) 0:00:20.754 ********* included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 12 July 2025 12:40:16 -0400 (0:00:00.058) 0:00:20.813 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 12 July 2025 12:40:16 -0400 (0:00:00.032) 0:00:20.846 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Saturday 12 July 2025 12:40:16 -0400 (0:00:00.059) 0:00:20.905 ********* included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 12 July 2025 12:40:16 -0400 (0:00:00.064) 0:00:20.970 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 12 July 2025 12:40:16 -0400 (0:00:00.039) 0:00:21.009 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Saturday 12 July 2025 12:40:16 -0400 (0:00:00.049) 0:00:21.059 ********* included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Saturday 12 July 2025 12:40:16 -0400 (0:00:00.073) 0:00:21.132 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Saturday 12 July 2025 12:40:16 -0400 (0:00:00.035) 0:00:21.167 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Saturday 12 July 2025 12:40:16 -0400 (0:00:00.037) 0:00:21.205 ********* included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Saturday 12 July 2025 12:40:17 -0400 (0:00:00.064) 0:00:21.269 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Saturday 12 July 2025 12:40:17 -0400 (0:00:00.030) 0:00:21.300 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Saturday 12 July 2025 12:40:17 -0400 (0:00:00.029) 0:00:21.329 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Saturday 12 July 2025 12:40:17 -0400 (0:00:00.030) 0:00:21.359 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: 
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Saturday 12 July 2025 12:40:17 -0400 (0:00:00.029) 0:00:21.388 ********* TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 12 July 2025 12:40:17 -0400 (0:00:00.133) 0:00:21.522 ********* included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Saturday 12 July 2025 12:40:17 -0400 (0:00:00.149) 0:00:21.672 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Saturday 12 July 2025 12:40:17 -0400 (0:00:00.061) 0:00:21.734 ********* ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Saturday 12 July 2025 12:40:17 -0400 (0:00:00.420) 0:00:22.154 ********* ok: [managed-node2] => { "ansible_facts": { "__firewall_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 12 July 2025 12:40:17 -0400 (0:00:00.054) 0:00:22.208 ********* ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Saturday 12 July 2025 12:40:18 -0400 (0:00:00.394) 0:00:22.603 ********* ok: [managed-node2] => { "ansible_facts": { "__firewall_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Run systemctl] ********************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:34 Saturday 12 July 2025 12:40:18 -0400 (0:00:00.058) 0:00:22.662 ********* ok: [managed-node2] => { "changed": false, "cmd": [ "systemctl", "is-system-running" ], "delta": "0:00:00.008508", "end": "2025-07-12 12:40:18.710943", "failed_when_result": false, "rc": 0, "start": "2025-07-12 12:40:18.702435" } STDOUT: running TASK [fedora.linux_system_roles.firewall : Require installed systemd] ********** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:41 Saturday 12 July 2025 12:40:18 -0400 (0:00:00.379) 0:00:23.041 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:46 Saturday 12 July 2025 12:40:18 -0400 (0:00:00.033) 
0:00:23.074 ********* ok: [managed-node2] => { "ansible_facts": { "__firewall_is_booted": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:51 Saturday 12 July 2025 12:40:18 -0400 (0:00:00.033) 0:00:23.107 ********* ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:63 Saturday 12 July 2025 12:40:21 -0400 (0:00:02.837) 0:00:25.945 ********* skipping: [managed-node2] => {} TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:68 Saturday 12 July 2025 12:40:21 -0400 (0:00:00.049) 0:00:25.994 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:73 Saturday 12 July 2025 12:40:21 -0400 (0:00:00.050) 0:00:26.045 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check which conflicting services are enabled] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 12 July 2025 12:40:21 -0400 (0:00:00.052) 0:00:26.097 ********* skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "item": "ufw", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:14 Saturday 12 July 2025 12:40:21 -0400 (0:00:00.063) 0:00:26.161 ********* skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'item': 'nftables', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "item": { "ansible_loop_var": "item", "changed": false, "item": "nftables", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'item': 'iptables', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "item": { "ansible_loop_var": "item", "changed": false, "item": "iptables", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'item': 'ufw', 
'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "item": { "ansible_loop_var": "item", "changed": false, "item": "ufw", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:24 Saturday 12 July 2025 12:40:21 -0400 (0:00:00.068) 0:00:26.230 ********* ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "ActiveEnterTimestamp": "Sat 2025-07-12 12:34:34 EDT", "ActiveEnterTimestampMonotonic": "334380508", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "dbus.socket polkit.service sysinit.target system.slice basic.target dbus.service", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-07-12 12:34:33 EDT", "AssertTimestampMonotonic": "333529803", "Before": "multi-user.target network-pre.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-07-12 12:34:33 EDT", "ConditionTimestampMonotonic": "333529801", "ConfigurationDirectoryMode": "0755", "Conflicts": "iptables.service shutdown.target nftables.service ipset.service ebtables.service ip6tables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "12934", "ExecMainStartTimestamp": "Sat 2025-07-12 12:34:33 EDT", "ExecMainStartTimestampMonotonic": "333544279", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 
}", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-07-12 12:34:33 EDT", "InactiveExitTimestampMonotonic": "333544451", "InvocationID": "23956a138bd04000a793482a1bdfdae2", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "12934", "MemoryAccounting": "yes", "MemoryCurrent": "42549248", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": 
"[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sat 2025-07-12 12:34:34 EDT", "StateChangeTimestampMonotonic": "334380508", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogTimestamp": "Sat 2025-07-12 12:34:34 EDT", "WatchdogTimestampMonotonic": "334380506", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:30 Saturday 12 July 2025 12:40:22 -0400 (0:00:00.534) 0:00:26.765 ********* ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "ActiveEnterTimestamp": "Sat 2025-07-12 12:34:34 EDT", "ActiveEnterTimestampMonotonic": "334380508", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "dbus.socket polkit.service sysinit.target system.slice basic.target dbus.service", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-07-12 12:34:33 EDT", "AssertTimestampMonotonic": "333529803", "Before": "multi-user.target network-pre.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-07-12 12:34:33 EDT", "ConditionTimestampMonotonic": "333529801", "ConfigurationDirectoryMode": "0755", "Conflicts": "iptables.service shutdown.target nftables.service ipset.service ebtables.service ip6tables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", 
"DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "12934", "ExecMainStartTimestamp": "Sat 2025-07-12 12:34:33 EDT", "ExecMainStartTimestampMonotonic": "333544279", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-07-12 12:34:33 EDT", "InactiveExitTimestampMonotonic": "333544451", "InvocationID": "23956a138bd04000a793482a1bdfdae2", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "12934", "MemoryAccounting": "yes", "MemoryCurrent": "42549248", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", 
"RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sat 2025-07-12 12:34:34 EDT", "StateChangeTimestampMonotonic": "334380508", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogTimestamp": "Sat 2025-07-12 12:34:34 EDT", "WatchdogTimestampMonotonic": "334380506", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:36 Saturday 12 July 2025 12:40:23 -0400 (0:00:00.547) 0:00:27.312 ********* ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/libexec/platform-python", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:45 Saturday 12 July 2025 12:40:23 -0400 (0:00:00.062) 0:00:27.375 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:58 Saturday 12 July 2025 12:40:23 -0400 (0:00:00.085) 0:00:27.460 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:74 Saturday 12 July 2025 12:40:23 -0400 (0:00:00.039) 0:00:27.500 ********* changed: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "port": "8000/tcp", "state": "enabled" } } changed: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "__firewall_changed": 
true, "ansible_loop_var": "item", "changed": true, "item": { "port": "9000/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:126 Saturday 12 July 2025 12:40:24 -0400 (0:00:01.276) 0:00:28.777 ********* skipping: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "item": { "port": "8000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "item": { "port": "9000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:137 Saturday 12 July 2025 12:40:24 -0400 (0:00:00.050) 0:00:28.827 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:146 Saturday 12 July 2025 12:40:24 -0400 (0:00:00.033) 0:00:28.860 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:152 Saturday 12 July 2025 12:40:24 -0400 (0:00:00.039) 0:00:28.900 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:161 Saturday 12 July 2025 12:40:24 -0400 (0:00:00.031) 0:00:28.932 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:172 Saturday 12 July 2025 12:40:24 -0400 (0:00:00.030) 0:00:28.962 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:178 Saturday 12 July 2025 12:40:24 -0400 (0:00:00.030) 0:00:28.993 ********* skipping: [managed-node2] => {} TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Saturday 12 July 2025 12:40:24 -0400 (0:00:00.029) 0:00:29.023 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Saturday 12 July 2025 12:40:24 -0400 
(0:00:00.030) 0:00:29.053 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Saturday 12 July 2025 12:40:24 -0400 (0:00:00.028) 0:00:29.082 ********* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Saturday 12 July 2025 12:40:24 -0400 (0:00:00.032) 0:00:29.115 ********* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Saturday 12 July 2025 12:40:24 -0400 (0:00:00.042) 0:00:29.158 ********* included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 12 July 2025 12:40:25 -0400 (0:00:00.188) 0:00:29.346 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 12 July 2025 12:40:25 -0400 (0:00:00.033) 0:00:29.380 ********* included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 12 July 2025 12:40:25 -0400 (0:00:00.054) 0:00:29.434 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 12 July 2025 12:40:25 -0400 (0:00:00.037) 0:00:29.472 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 12 July 2025 12:40:25 -0400 (0:00:00.042) 0:00:29.515 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman 
: See if getsubids exists] ************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 12 July 2025 12:40:25 -0400 (0:00:00.057) 0:00:29.572 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 12 July 2025 12:40:25 -0400 (0:00:00.048) 0:00:29.620 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 12 July 2025 12:40:25 -0400 (0:00:00.049) 0:00:29.669 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 12 July 2025 12:40:25 -0400 (0:00:00.046) 0:00:29.716 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 12 July 2025 12:40:25 -0400 (0:00:00.035) 0:00:29.751 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 12 July 2025 12:40:25 -0400 (0:00:00.036) 0:00:29.787 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 12 July 2025 12:40:25 -0400 (0:00:00.037) 0:00:29.825 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 12 July 2025 12:40:25 -0400 (0:00:00.035) 0:00:29.860 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 12 July 2025 12:40:25 -0400 (0:00:00.029) 0:00:29.890 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 12 July 2025 12:40:25 
-0400 (0:00:00.038) 0:00:29.929 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 12 July 2025 12:40:25 -0400 (0:00:00.075) 0:00:30.005 ********* included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 12 July 2025 12:40:25 -0400 (0:00:00.082) 0:00:30.087 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 12 July 2025 12:40:25 -0400 (0:00:00.051) 0:00:30.139 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 12 July 2025 12:40:25 -0400 (0:00:00.051) 0:00:30.190 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 12 July 2025 12:40:25 -0400 (0:00:00.052) 0:00:30.242 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 12 July 2025 12:40:26 -0400 (0:00:00.038) 0:00:30.281 ********* fatal: [managed-node2]: FAILED! => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result" } TASK [Dump journal] ************************************************************ task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:142 Saturday 12 July 2025 12:40:26 -0400 (0:00:00.040) 0:00:30.321 ********* fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.027579", "end": "2025-07-12 12:40:26.369978", "failed_when_result": true, "rc": 0, "start": "2025-07-12 12:40:26.342399" } STDOUT: -- Logs begin at Sat 2025-07-12 12:29:00 EDT, end at Sat 2025-07-12 12:40:26 EDT. -- Jul 12 12:33:58 managed-node2 systemd[1]: Mounting FUSE Control File System... -- Subject: Unit sys-fs-fuse-connections.mount has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sys-fs-fuse-connections.mount has begun starting up. Jul 12 12:33:58 managed-node2 systemd[1]: Mounted FUSE Control File System. 
-- Subject: Unit sys-fs-fuse-connections.mount has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sys-fs-fuse-connections.mount has finished starting up. -- -- The start-up result is done. Jul 12 12:33:59 managed-node2 dbus-daemon[601]: [system] Reloaded configuration Jul 12 12:33:59 managed-node2 dbus-daemon[601]: [system] Reloaded configuration Jul 12 12:34:23 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. -- Subject: Unit run-rb64140c001d3434a8ef1cd16a214e1b7.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit run-rb64140c001d3434a8ef1cd16a214e1b7.service has finished starting up. -- -- The start-up result is done. Jul 12 12:34:23 managed-node2 systemd[1]: cgroup compatibility translation between legacy and unified hierarchy settings activated. See cgroup-compat debug messages for details. Jul 12 12:34:23 managed-node2 systemd[1]: Starting man-db-cache-update.service... -- Subject: Unit man-db-cache-update.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has begun starting up. Jul 12 12:34:23 managed-node2 systemd[1]: Reloading. Jul 12 12:34:24 managed-node2 sudo[9345]: pam_unix(sudo:session): session closed for user root Jul 12 12:34:25 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit man-db-cache-update.service has successfully entered the 'dead' state. Jul 12 12:34:25 managed-node2 systemd[1]: Started man-db-cache-update.service. -- Subject: Unit man-db-cache-update.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has finished starting up. -- -- The start-up result is done. Jul 12 12:34:25 managed-node2 systemd[1]: run-rb64140c001d3434a8ef1cd16a214e1b7.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-rb64140c001d3434a8ef1cd16a214e1b7.service has successfully entered the 'dead' state. 
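
[Annotation] The play above fails at "Manage each secret". Because the podman role wraps secret handling in no_log: true, Ansible prints only the "censored" placeholder instead of the underlying module error, and the surrounding journalctl output is the test's follow-up diagnostic dump. For reference, a minimal sketch of the secrets interface this test exercises — podman_secrets is the role's documented variable, but the secret name and value below are placeholders (an assumption), not the vault-encrypted data the test actually passes:

    # Sketch only: each podman_secrets entry mirrors the parameters of
    # containers.podman.podman_secret; name and data here are placeholders.
    - hosts: all
      vars:
        podman_secrets:
          - name: mysql_root_password_container   # placeholder name
            state: present
            skip_existing: true
            data: "{{ mysql_container_root_password }}"
      roles:
        - fedora.linux_system_roles.podman

When reproducing such a censored failure locally, temporarily setting no_log: false on the task exposes the real error message; that should never be done in CI, since it would write the secret data into the log.
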
Jul 12 12:34:25 managed-node2 platform-python[11899]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:34:26 managed-node2 platform-python[12028]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 12 12:34:26 managed-node2 platform-python[12152]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:34:28 managed-node2 platform-python[12277]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:34:28 managed-node2 platform-python[12400]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:34:29 managed-node2 platform-python[12523]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:34:29 managed-node2 platform-python[12647]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:34:32 managed-node2 platform-python[12770]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 12 12:34:33 managed-node2 platform-python[12897]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 12 12:34:33 managed-node2 systemd[1]: Reloading. Jul 12 12:34:33 managed-node2 systemd[1]: Starting firewalld - dynamic firewall daemon... -- Subject: Unit firewalld.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit firewalld.service has begun starting up. Jul 12 12:34:34 managed-node2 systemd[1]: Started firewalld - dynamic firewall daemon. -- Subject: Unit firewalld.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit firewalld.service has finished starting up. -- -- The start-up result is done. Jul 12 12:34:34 managed-node2 firewalld[12934]: WARNING: AllowZoneDrifting is enabled. This is considered an insecure configuration option. It will be removed in a future release. Please consider disabling it now. 
Jul 12 12:34:35 managed-node2 platform-python[13124]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 12 12:34:36 managed-node2 platform-python[13247]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:34:36 managed-node2 platform-python[13370]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:34:37 managed-node2 platform-python[13493]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:34:40 managed-node2 platform-python[13616]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:34:42 managed-node2 platform-python[13739]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:34:45 managed-node2 dbus-daemon[601]: [system] Reloaded configuration Jul 12 12:34:45 managed-node2 dbus-daemon[601]: [system] Reloaded configuration Jul 12 12:34:45 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. -- Subject: Unit run-rc4136976cbe94ee39dd82aa6d795790f.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit run-rc4136976cbe94ee39dd82aa6d795790f.service has finished starting up. -- -- The start-up result is done. Jul 12 12:34:45 managed-node2 systemd[1]: Starting man-db-cache-update.service... -- Subject: Unit man-db-cache-update.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has begun starting up. Jul 12 12:34:46 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit man-db-cache-update.service has successfully entered the 'dead' state. Jul 12 12:34:46 managed-node2 systemd[1]: Started man-db-cache-update.service. -- Subject: Unit man-db-cache-update.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has finished starting up. -- -- The start-up result is done. Jul 12 12:34:46 managed-node2 systemd[1]: run-rc4136976cbe94ee39dd82aa6d795790f.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-rc4136976cbe94ee39dd82aa6d795790f.service has successfully entered the 'dead' state. Jul 12 12:34:46 managed-node2 platform-python[14345]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 12 12:34:47 managed-node2 platform-python[14493]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:34:48 managed-node2 platform-python[14617]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 12 12:34:49 managed-node2 kernel: SELinux: Converting 460 SID table entries... Jul 12 12:34:49 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jul 12 12:34:49 managed-node2 kernel: SELinux: policy capability open_perms=1 Jul 12 12:34:49 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jul 12 12:34:49 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jul 12 12:34:49 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jul 12 12:34:49 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jul 12 12:34:49 managed-node2 dbus-daemon[601]: [system] Reloaded configuration Jul 12 12:34:50 managed-node2 platform-python[14744]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 12 12:34:55 managed-node2 platform-python[14867]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:34:57 managed-node2 platform-python[14992]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:34:57 managed-node2 platform-python[15115]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:34:57 managed-node2 platform-python[15238]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 12 12:34:58 managed-node2 
platform-python[15337]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752338097.5592623-9962-32376786540712/source _original_basename=tmp571i0p6f follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Jul 12 12:34:58 managed-node2 platform-python[15462]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 12 12:34:58 managed-node2 kernel: evm: overlay not supported Jul 12 12:34:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck2773103887-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-metacopy\x2dcheck2773103887-merged.mount has successfully entered the 'dead' state. Jul 12 12:34:59 managed-node2 systemd[1]: Created slice machine.slice. -- Subject: Unit machine.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine.slice has finished starting up. -- -- The start-up result is done. Jul 12 12:34:59 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice. -- Subject: Unit machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice has finished starting up. -- -- The start-up result is done. Jul 12 12:34:59 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Jul 12 12:35:03 managed-node2 platform-python[15788]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:35:04 managed-node2 platform-python[15917]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:35:07 managed-node2 platform-python[16042]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:35:11 managed-node2 platform-python[16165]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 12 12:35:11 managed-node2 platform-python[16292]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 12 12:35:12 managed-node2 platform-python[16419]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 12 12:35:14 managed-node2 platform-python[16542]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:35:17 managed-node2 platform-python[16665]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:35:19 managed-node2 platform-python[16788]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False 
validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:35:22 managed-node2 platform-python[16911]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 12 12:35:24 managed-node2 platform-python[17059]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 12 12:35:25 managed-node2 platform-python[17182]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 12 12:35:30 managed-node2 platform-python[17305]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:35:31 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 12 12:35:32 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 12 12:35:32 managed-node2 platform-python[17567]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:35:32 managed-node2 platform-python[17690]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:35:33 managed-node2 platform-python[17813]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 12 12:35:33 managed-node2 platform-python[17912]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752338133.0421798-11440-200522690369055/source _original_basename=tmpx4spj4rr follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Jul 12 12:35:34 managed-node2 platform-python[18037]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None 
quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 12 12:35:34 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice. -- Subject: Unit machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice has finished starting up. -- -- The start-up result is done. Jul 12 12:35:34 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 12 12:35:37 managed-node2 platform-python[18324]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:35:38 managed-node2 platform-python[18453]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:35:40 managed-node2 platform-python[18578]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:35:43 managed-node2 platform-python[18701]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 12 12:35:44 managed-node2 platform-python[18828]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 12 12:35:45 managed-node2 platform-python[18955]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 12 12:35:47 managed-node2 platform-python[19078]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:35:50 managed-node2 platform-python[19201]: ansible-dnf Invoked with name=['grubby'] 
state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:35:52 managed-node2 platform-python[19324]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:35:55 managed-node2 platform-python[19447]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 12 12:35:57 managed-node2 platform-python[19595]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 12 12:35:58 managed-node2 platform-python[19718]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 12 12:36:02 managed-node2 platform-python[19841]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:36:03 managed-node2 platform-python[19966]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:36:04 managed-node2 platform-python[20090]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 12 12:36:04 managed-node2 platform-python[20217]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:36:05 managed-node2 platform-python[20342]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 12 12:36:05 managed-node2 platform-python[20342]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/nopull.yml Jul 12 12:36:05 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice. 
-- Subject: Unit machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice has finished shutting down. Jul 12 12:36:05 managed-node2 systemd[1]: machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice: Consumed 0 CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice completed and consumed the indicated resources. Jul 12 12:36:05 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 12 12:36:05 managed-node2 platform-python[20480]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:36:06 managed-node2 platform-python[20603]: ansible-command Invoked with _raw_params=podman image prune -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:36:09 managed-node2 platform-python[20858]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:36:10 managed-node2 platform-python[20987]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:36:13 managed-node2 platform-python[21112]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:36:16 managed-node2 platform-python[21235]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 12 12:36:17 managed-node2 platform-python[21362]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 12 12:36:18 managed-node2 platform-python[21489]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] 
source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 12 12:36:19 managed-node2 platform-python[21612]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:36:22 managed-node2 platform-python[21735]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:36:25 managed-node2 platform-python[21858]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:36:28 managed-node2 platform-python[21981]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 12 12:36:30 managed-node2 platform-python[22129]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 12 12:36:31 managed-node2 platform-python[22252]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 12 12:36:35 managed-node2 platform-python[22375]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:36:36 managed-node2 platform-python[22500]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:36:37 managed-node2 platform-python[22624]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 12 12:36:37 managed-node2 platform-python[22751]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:36:38 managed-node2 platform-python[22876]: ansible-containers.podman.podman_play Invoked with state=absent 
kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 12 12:36:38 managed-node2 platform-python[22876]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/bogus.yml Jul 12 12:36:38 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice. -- Subject: Unit machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice has finished shutting down. Jul 12 12:36:38 managed-node2 systemd[1]: machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice: Consumed 0 CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice completed and consumed the indicated resources. Jul 12 12:36:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 12 12:36:38 managed-node2 platform-python[23015]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:36:39 managed-node2 platform-python[23138]: ansible-command Invoked with _raw_params=podman image prune -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:36:39 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
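The teardown sequence logged above (podman_play with state=absent, removal of the kube file, then an image prune) matches tasks of roughly this shape; a minimal sketch in which the paths, states, and command are taken from the logged invocations and only the task names are assumed:

    - name: Tear down the pod defined by the kube file
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/bogus.yml
        state: absent

    - name: Remove the kube file itself
      ansible.builtin.file:
        path: /etc/containers/ansible-kubernetes.d/bogus.yml
        state: absent

    - name: Prune unused container images
      ansible.builtin.command: podman image prune -f

Removing the pod is what produces the "Removed slice cgroup machine-libpod_pod_…" message above: each podman pod gets its own systemd slice, which goes away with the pod.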
Jul 12 12:36:42 managed-node2 platform-python[23394]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:36:44 managed-node2 platform-python[23523]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:36:46 managed-node2 platform-python[23648]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:36:50 managed-node2 platform-python[23771]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 12 12:36:50 managed-node2 platform-python[23898]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 12 12:36:51 managed-node2 platform-python[24025]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 12 12:36:52 managed-node2 platform-python[24148]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:36:55 managed-node2 platform-python[24271]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:36:58 managed-node2 platform-python[24394]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False 
validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:37:01 managed-node2 platform-python[24517]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 12 12:37:03 managed-node2 platform-python[24665]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 12 12:37:04 managed-node2 platform-python[24788]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 12 12:37:08 managed-node2 platform-python[24911]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jul 12 12:37:09 managed-node2 platform-python[25036]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:37:09 managed-node2 platform-python[25161]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:37:10 managed-node2 platform-python[25285]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:37:11 managed-node2 platform-python[25409]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:37:12 managed-node2 platform-python[25533]: ansible-command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jul 12 12:37:12 managed-node2 systemd[1]: Created slice User Slice of UID 3001. -- Subject: Unit user-3001.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-3001.slice has finished starting up. -- -- The start-up result is done. Jul 12 12:37:12 managed-node2 systemd[1]: Starting User runtime directory /run/user/3001... -- Subject: Unit user-runtime-dir@3001.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@3001.service has begun starting up. Jul 12 12:37:12 managed-node2 systemd[1]: Started User runtime directory /run/user/3001. -- Subject: Unit user-runtime-dir@3001.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@3001.service has finished starting up. -- -- The start-up result is done. Jul 12 12:37:12 managed-node2 systemd[1]: Starting User Manager for UID 3001... -- Subject: Unit user@3001.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@3001.service has begun starting up. Jul 12 12:37:12 managed-node2 systemd[25539]: pam_unix(systemd-user:session): session opened for user podman_basic_user by (uid=0) Jul 12 12:37:12 managed-node2 systemd[25539]: Reached target Paths. 
-- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:12 managed-node2 systemd[25539]: Started Mark boot as successful after the user session has run 2 minutes. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:12 managed-node2 systemd[25539]: Starting D-Bus User Message Bus Socket. -- Subject: Unit UNIT has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun starting up. Jul 12 12:37:12 managed-node2 systemd[25539]: Reached target Timers. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:12 managed-node2 systemd[25539]: Listening on D-Bus User Message Bus Socket. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:12 managed-node2 systemd[25539]: Reached target Sockets. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:12 managed-node2 systemd[25539]: Reached target Basic System. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:12 managed-node2 systemd[25539]: Reached target Default. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:12 managed-node2 systemd[25539]: Startup finished in 28ms. -- Subject: User manager start-up is now complete -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The user manager instance for user 3001 has been started. All services queued -- for starting have been started. Note that other services might still be starting -- up or be started at any later time. -- -- Startup of the manager took 28872 microseconds. Jul 12 12:37:12 managed-node2 systemd[1]: Started User Manager for UID 3001. -- Subject: Unit user@3001.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@3001.service has finished starting up. -- -- The start-up result is done. 
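The loginctl invocation above is what allows the rootless user's systemd instance (user@3001.service) to keep running without an open login session. A minimal sketch of a task producing exactly that command, with the creates guard taken from the logged parameters:

    - name: Enable lingering for the rootless user
      ansible.builtin.command: loginctl enable-linger podman_basic_user
      args:
        creates: /var/lib/systemd/linger/podman_basic_user

The creates argument makes the task idempotent: systemd-logind records linger state as a flag file under /var/lib/systemd/linger/, so reruns skip the command once the file exists.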
Jul 12 12:37:13 managed-node2 platform-python[25674]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:37:13 managed-node2 platform-python[25797]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:37:13 managed-node2 sudo[25920]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-flowetcsnhyltwcqlvhwzynouopxqrjl ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338233.6175296-15753-169052845046334/AnsiballZ_podman_image.py' Jul 12 12:37:13 managed-node2 sudo[25920]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:37:14 managed-node2 systemd[25539]: Started D-Bus User Message Bus. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:14 managed-node2 systemd[25539]: Created slice user.slice. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:14 managed-node2 systemd[25539]: Started podman-25932.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:14 managed-node2 systemd[25539]: Started podman-pause-5a039c99.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:14 managed-node2 systemd[25539]: Started podman-25948.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:14 managed-node2 systemd[25539]: Started podman-25963.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
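The sudo entries above show the become pattern used for rootless Podman: the module payload (AnsiballZ_podman_image.py) runs as podman_basic_user with XDG_RUNTIME_DIR pointed at /run/user/3001 so Podman finds the user's runtime directory. A minimal sketch of the kind of task being wrapped here; the become/environment details mirror the sudo line, while the image name is an assumption inferred from the play-kube debug output further below:

    - name: Ensure the test image is present for the rootless user
      containers.podman.podman_image:
        name: quay.io/libpod/testimage:20210610   # assumed from the debug output below
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001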
Jul 12 12:37:14 managed-node2 sudo[25920]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:37:15 managed-node2 platform-python[26093]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:37:15 managed-node2 platform-python[26216]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:37:16 managed-node2 platform-python[26339]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 12 12:37:16 managed-node2 platform-python[26438]: ansible-copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752338235.9099538-15874-160470408042927/source _original_basename=tmphfu4mgeo follow=False checksum=effe6499c246b4e7daac7803b02ca2cad861ad5c backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Jul 12 12:37:16 managed-node2 sudo[26563]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ggfotomaneyfnrfutjcomejzhhvgfhsm ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338236.608599-15911-85925477640473/AnsiballZ_podman_play.py' Jul 12 12:37:16 managed-node2 sudo[26563]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:37:16 managed-node2 platform-python[26566]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 12 12:37:16 managed-node2 systemd[25539]: Started podman-26574.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:17 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6 Jul 12 12:37:17 managed-node2 systemd[25539]: Started rootless-netns-cfbb367e.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
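The podman_play invocation above maps onto a task of roughly this shape; every module parameter shown (kube_file, state, debug, log_level) appears verbatim in the logged invocation, and the become/environment wrapper mirrors the preceding sudo line. A sketch, not the role's actual source:

    - name: Start the httpd1 pod as the rootless user
      containers.podman.podman_play:
        kube_file: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
        state: started
        debug: true
        log_level: debug
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001

This is the task that produces the "podman play kube --start=true --log-level=debug …" command and the level=debug stderr stream logged below.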
Jul 12 12:37:17 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Jul 12 12:37:17 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth14aad36c: link is not ready Jul 12 12:37:17 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered blocking state Jul 12 12:37:17 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered disabled state Jul 12 12:37:17 managed-node2 kernel: device veth14aad36c entered promiscuous mode Jul 12 12:37:17 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready Jul 12 12:37:17 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready Jul 12 12:37:17 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth14aad36c: link becomes ready Jul 12 12:37:17 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered blocking state Jul 12 12:37:17 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered forwarding state Jul 12 12:37:17 managed-node2 dnsmasq[26760]: listening on cni-podman1(#3): 10.89.0.1 Jul 12 12:37:17 managed-node2 dnsmasq[26762]: started, version 2.79 cachesize 150 Jul 12 12:37:17 managed-node2 dnsmasq[26762]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify Jul 12 12:37:17 managed-node2 dnsmasq[26762]: using local addresses only for domain dns.podman Jul 12 12:37:17 managed-node2 dnsmasq[26762]: reading /etc/resolv.conf Jul 12 12:37:17 managed-node2 dnsmasq[26762]: using local addresses only for domain dns.podman Jul 12 12:37:17 managed-node2 dnsmasq[26762]: using nameserver 10.0.2.3#53 Jul 12 12:37:17 managed-node2 dnsmasq[26762]: using nameserver 10.29.169.13#53 Jul 12 12:37:17 managed-node2 dnsmasq[26762]: using nameserver 10.29.170.12#53 Jul 12 12:37:17 managed-node2 dnsmasq[26762]: using nameserver 10.2.32.1#53 Jul 12 12:37:17 managed-node2 dnsmasq[26762]: read /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Jul 12 12:37:17 managed-node2 conmon[26776]: conmon db962b9f1559ffd15c96 : failed to write to /proc/self/oom_score_adj: Permission denied Jul 12 12:37:17 managed-node2 conmon[26777]: conmon db962b9f1559ffd15c96 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach} Jul 12 12:37:17 managed-node2 conmon[26777]: conmon db962b9f1559ffd15c96 : terminal_ctrl_fd: 14 Jul 12 12:37:17 managed-node2 conmon[26777]: conmon db962b9f1559ffd15c96 : winsz read side: 17, winsz write side: 18 Jul 12 12:37:17 managed-node2 conmon[26777]: conmon db962b9f1559ffd15c96 : container PID: 26787 Jul 12 12:37:17 managed-node2 conmon[26797]: conmon 8b812a2ec55f9de0cde0 : failed to write to /proc/self/oom_score_adj: Permission denied Jul 12 12:37:17 managed-node2 conmon[26798]: conmon 8b812a2ec55f9de0cde0 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Jul 12 12:37:17 managed-node2 conmon[26798]: conmon 8b812a2ec55f9de0cde0 : terminal_ctrl_fd: 13 Jul 12 12:37:17 managed-node2 conmon[26798]: conmon 8b812a2ec55f9de0cde0 : winsz read side: 16, winsz write side: 17 Jul 12 12:37:17 managed-node2 conmon[26798]: conmon 8b812a2ec55f9de0cde0 : container PID: 26808 Jul 12 12:37:17 managed-node2 platform-python[26566]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jul 12 12:37:17 managed-node2 platform-python[26566]: 
ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 49a038584fa170b91928655aabc3334b950d706880329a96a99914f26bc7c59d Container: 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 Jul 12 12:37:17 managed-node2 platform-python[26566]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-12T12:37:16-04:00" level=info msg="/bin/podman filtering at log level debug" time="2025-07-12T12:37:16-04:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-07-12T12:37:16-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-07-12T12:37:16-04:00" level=info msg="Using sqlite as database backend" time="2025-07-12T12:37:16-04:00" level=debug msg="Using graph driver overlay" time="2025-07-12T12:37:16-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2025-07-12T12:37:16-04:00" level=debug msg="Using run root /run/user/3001/containers" time="2025-07-12T12:37:16-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2025-07-12T12:37:16-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2025-07-12T12:37:16-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2025-07-12T12:37:16-04:00" level=debug msg="Using transient store: false" time="2025-07-12T12:37:16-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-07-12T12:37:16-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-12T12:37:16-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-12T12:37:16-04:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2025-07-12T12:37:16-04:00" level=debug msg="Cached value indicated that native-diff is usable" time="2025-07-12T12:37:16-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2025-07-12T12:37:16-04:00" level=debug msg="Initializing event backend file" time="2025-07-12T12:37:16-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-07-12T12:37:16-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-07-12T12:37:16-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-07-12T12:37:16-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-07-12T12:37:16-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-07-12T12:37:16-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-07-12T12:37:16-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-07-12T12:37:16-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" 
time="2025-07-12T12:37:16-04:00" level=info msg="Setting parallel job count to 7" time="2025-07-12T12:37:16-04:00" level=debug msg="Successfully loaded 1 networks" time="2025-07-12T12:37:16-04:00" level=debug msg="found free device name cni-podman1" time="2025-07-12T12:37:16-04:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2025-07-12T12:37:16-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-12 12:37:16.97600692 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-07-12T12:37:16-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-07-12T12:37:16-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:37:16-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:16-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-07-12T12:37:16-04:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" does not resolve to an image ID" time="2025-07-12T12:37:16-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-07-12T12:37:16-04:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" does not resolve to an image ID" time="2025-07-12T12:37:16-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." 
time="2025-07-12T12:37:16-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:16-04:00" level=debug msg="FROM \"scratch\"" time="2025-07-12T12:37:16-04:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2025-07-12T12:37:16-04:00" level=debug msg="Check for idmapped mounts support " time="2025-07-12T12:37:16-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:16-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:17-04:00" level=debug msg="overlay: test mount indicated that volatile is being used" time="2025-07-12T12:37:17-04:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263/work,userxattr,volatile,context=\"system_u:object_r:container_file_t:s0:c99,c874\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Container ID: 74b081262df1d810c422dbcbe1db2f5a2adc384492d57cda98cbd9e90ab37ee1" time="2025-07-12T12:37:17-04:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}" time="2025-07-12T12:37:17-04:00" level=debug msg="COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\"\", Src:[]string{\"/usr/libexec/podman/catatonit\"}, Dest:\"/catatonit\", Download:false, Chown:\"\", Chmod:\"\", Checksum:\"\", Files:[]imagebuilder.File(nil)}" time="2025-07-12T12:37:17-04:00" level=debug msg="added content file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd" time="2025-07-12T12:37:17-04:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT [\"/catatonit\", \"-P\"]}" time="2025-07-12T12:37:17-04:00" level=debug msg="COMMIT localhost/podman-pause:4.9.4-dev-1708535009" time="2025-07-12T12:37:17-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\"" time="2025-07-12T12:37:17-04:00" level=debug msg="COMMIT \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\"" time="2025-07-12T12:37:17-04:00" level=debug msg="committing image with reference \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" is allowed by policy" time="2025-07-12T12:37:17-04:00" level=debug msg="layer list: [\"221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263\"]" time="2025-07-12T12:37:17-04:00" level=debug msg="using \"/var/tmp/buildah1838958819\" to hold temporary data" time="2025-07-12T12:37:17-04:00" level=debug msg="Tar with 
options on /home/podman_basic_user/.local/share/containers/storage/overlay/221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263/diff" time="2025-07-12T12:37:17-04:00" level=debug msg="layer \"221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263\" size is 767488 bytes, uncompressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690, possibly-compressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690" time="2025-07-12T12:37:17-04:00" level=debug msg="OCIv1 config = {\"created\":\"2025-07-12T16:37:17.118933835Z\",\"architecture\":\"amd64\",\"os\":\"linux\",\"config\":{\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Entrypoint\":[\"/catatonit\",\"-P\"],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"]},\"history\":[{\"created\":\"2025-07-12T16:37:17.118347731Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-07-12T16:37:17.122165868Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-07-12T12:37:17-04:00" level=debug msg="OCIv1 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\":\"application/vnd.oci.image.config.v1+json\",\"digest\":\"sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\",\"size\":668},\"layers\":[{\"mediaType\":\"application/vnd.oci.image.layer.v1.tar\",\"digest\":\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\",\"size\":767488}],\"annotations\":{\"org.opencontainers.image.base.digest\":\"\",\"org.opencontainers.image.base.name\":\"\"}}" time="2025-07-12T12:37:17-04:00" level=debug msg="Docker v2s2 config = {\"created\":\"2025-07-12T16:37:17.118933835Z\",\"container\":\"74b081262df1d810c422dbcbe1db2f5a2adc384492d57cda98cbd9e90ab37ee1\",\"container_config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":[],\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":[],\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"architecture\":\"amd64\",\"os\":\"linux\",\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"]},\"history\":[{\"created\":\"2025-07-12T16:37:17.118347731Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-07-12T16:37:17.122165868Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-07-12T12:37:17-04:00" level=debug msg="Docker v2s2 manifest = 
{\"schemaVersion\":2,\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\"config\":{\"mediaType\":\"application/vnd.docker.container.image.v1+json\",\"size\":1342,\"digest\":\"sha256:706c7e5b14dda8248bcff3ec5c250761bd8f764535609aa9365ce9e4b43361c2\"},\"layers\":[{\"mediaType\":\"application/vnd.docker.image.rootfs.diff.tar\",\"size\":767488,\"digest\":\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"}]}" time="2025-07-12T12:37:17-04:00" level=debug msg="Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite" time="2025-07-12T12:37:17-04:00" level=debug msg="IsRunningImageAllowed for image containers-storage:" time="2025-07-12T12:37:17-04:00" level=debug msg=" Using transport \"containers-storage\" policy section " time="2025-07-12T12:37:17-04:00" level=debug msg=" Requirement 0: allowed" time="2025-07-12T12:37:17-04:00" level=debug msg="Overall: allowed" time="2025-07-12T12:37:17-04:00" level=debug msg="start reading config" time="2025-07-12T12:37:17-04:00" level=debug msg="finished reading config" time="2025-07-12T12:37:17-04:00" level=debug msg="Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]" time="2025-07-12T12:37:17-04:00" level=debug msg="... will first try using the original manifest unmodified" time="2025-07-12T12:37:17-04:00" level=debug msg="Checking if we can reuse blob sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690: general substitution = true, compression for MIME type \"application/vnd.oci.image.layer.v1.tar\" = true" time="2025-07-12T12:37:17-04:00" level=debug msg="reading layer \"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"" time="2025-07-12T12:37:17-04:00" level=debug msg="No compression detected" time="2025-07-12T12:37:17-04:00" level=debug msg="Using original blob without modification" time="2025-07-12T12:37:17-04:00" level=debug msg="Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690/diff" time="2025-07-12T12:37:17-04:00" level=debug msg="finished reading layer \"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"" time="2025-07-12T12:37:17-04:00" level=debug msg="No compression detected" time="2025-07-12T12:37:17-04:00" level=debug msg="Compression change for blob sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566 (\"application/vnd.oci.image.config.v1+json\") not supported" time="2025-07-12T12:37:17-04:00" level=debug msg="Using original blob without modification" time="2025-07-12T12:37:17-04:00" level=debug msg="setting image creation date to 2025-07-12 16:37:17.118933835 +0000 UTC" time="2025-07-12T12:37:17-04:00" level=debug msg="created new image ID \"948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\" with metadata \"{}\"" time="2025-07-12T12:37:17-04:00" level=debug msg="added name \"localhost/podman-pause:4.9.4-dev-1708535009\" to image \"948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"" time="2025-07-12T12:37:17-04:00" level=debug msg="parsed reference into 
\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\"" time="2025-07-12T12:37:17-04:00" level=debug msg="printing final image id \"948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Pod using bridge network mode" time="2025-07-12T12:37:17-04:00" level=debug msg="Got pod cgroup as /libpod_parent/49a038584fa170b91928655aabc3334b950d706880329a96a99914f26bc7c59d" time="2025-07-12T12:37:17-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:37:17-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:17-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-07-12T12:37:17-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:37:17-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566)" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Inspecting image 948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Inspecting image 948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566" time="2025-07-12T12:37:17-04:00" level=debug msg="Inspecting image 948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566" time="2025-07-12T12:37:17-04:00" level=debug msg="using systemd mode: false" time="2025-07-12T12:37:17-04:00" level=debug msg="setting container name 49a038584fa1-infra" time="2025-07-12T12:37:17-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Allocated lock 1 for container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70" time="2025-07-12T12:37:17-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Created container \"db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Container \"db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\" has work directory 
\"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Container \"db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\" has run directory \"/run/user/3001/containers/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:17-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:17-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-12T12:37:17-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:17-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-07-12T12:37:17-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:17-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:17-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-12T12:37:17-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:17-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:17-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:17-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-07-12T12:37:17-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:17-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:17-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:17-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-12T12:37:17-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:17-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-12T12:37:17-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-12T12:37:17-04:00" level=debug msg="using systemd mode: false" time="2025-07-12T12:37:17-04:00" level=debug msg="adding container to pod httpd1" time="2025-07-12T12:37:17-04:00" level=debug msg="setting container name httpd1-httpd1" 
time="2025-07-12T12:37:17-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-07-12T12:37:17-04:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-07-12T12:37:17-04:00" level=debug msg="Adding mount /proc" time="2025-07-12T12:37:17-04:00" level=debug msg="Adding mount /dev" time="2025-07-12T12:37:17-04:00" level=debug msg="Adding mount /dev/pts" time="2025-07-12T12:37:17-04:00" level=debug msg="Adding mount /dev/mqueue" time="2025-07-12T12:37:17-04:00" level=debug msg="Adding mount /sys" time="2025-07-12T12:37:17-04:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-07-12T12:37:17-04:00" level=debug msg="Allocated lock 2 for container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057" time="2025-07-12T12:37:17-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Created container \"8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Container \"8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Container \"8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\" has run directory \"/run/user/3001/containers/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Strongconnecting node db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70" time="2025-07-12T12:37:17-04:00" level=debug msg="Pushed db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 onto stack" time="2025-07-12T12:37:17-04:00" level=debug msg="Finishing node db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70. Popped db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 off stack" time="2025-07-12T12:37:17-04:00" level=debug msg="Strongconnecting node 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057" time="2025-07-12T12:37:17-04:00" level=debug msg="Pushed 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 onto stack" time="2025-07-12T12:37:17-04:00" level=debug msg="Finishing node 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057. 
Popped 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 off stack" time="2025-07-12T12:37:17-04:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/OM4I4NAT7NV6G6FUUDQFTEASSZ,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/a85a1e9326b5058f772b4fc440996b4ba136c02bad6966e471895a60f615298c/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/a85a1e9326b5058f772b4fc440996b4ba136c02bad6966e471895a60f615298c/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c277,c351\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Mounted container \"db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/a85a1e9326b5058f772b4fc440996b4ba136c02bad6966e471895a60f615298c/merged\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Created root filesystem for container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 at /home/podman_basic_user/.local/share/containers/storage/overlay/a85a1e9326b5058f772b4fc440996b4ba136c02bad6966e471895a60f615298c/merged" time="2025-07-12T12:37:17-04:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-d0ac84ca-ca87-3466-1642-2cff38531036 for container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70" time="2025-07-12T12:37:17-04:00" level=debug msg="creating rootless network namespace with name \"rootless-netns-d22c9f230d0691b8f418\"" time="2025-07-12T12:37:17-04:00" level=debug msg="slirp4netns command: /bin/slirp4netns --disable-host-loopback --mtu=65520 --enable-sandbox --enable-seccomp --enable-ipv6 -c -r 3 --netns-type=path /run/user/3001/netns/rootless-netns-d22c9f230d0691b8f418 tap0" time="2025-07-12T12:37:17-04:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" time="2025-07-12T12:37:17-04:00" level=debug msg="cni result for container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:0a:fa:9a:36:b8:3a Sandbox:} {Name:veth14aad36c Mac:82:8b:99:b5:f7:b0 Sandbox:} {Name:eth0 Mac:b6:e7:40:6d:da:9c Sandbox:/run/user/3001/netns/netns-d0ac84ca-ca87-3466-1642-2cff38531036}] [{Version:4 Interface:0xc0008e9188 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}" time="2025-07-12T12:37:17-04:00" level=debug msg="rootlessport: time=\"2025-07-12T12:37:17-04:00\" level=info msg=\"Starting parent driver\"\ntime=\"2025-07-12T12:37:17-04:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport4142254753/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport4142254753/.bp.sock]\"\ntime=\"2025-07-12T12:37:17-04:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2025-07-12T12:37:17-04:00" level=debug msg="rootlessport: time=\"2025-07-12T12:37:17-04:00\" level=info msg=\"Waiting for initComplete\"\n" time="2025-07-12T12:37:17-04:00" level=debug msg="rootlessport: time=\"2025-07-12T12:37:17-04:00\" level=info msg=\"initComplete is closed; parent and child established the communication channel\"\n" time="2025-07-12T12:37:17-04:00" level=debug msg="rootlessport: time=\"2025-07-12T12:37:17-04:00\" level=info msg=\"Exposing ports [{ 80 15001 1 tcp}]\"\n" time="2025-07-12T12:37:17-04:00" level=debug 
msg="rootlessport is ready" time="2025-07-12T12:37:17-04:00" level=debug msg="rootlessport: time=\"2025-07-12T12:37:17-04:00\" level=info msg=Ready\n" time="2025-07-12T12:37:17-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2025-07-12T12:37:17-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-07-12T12:37:17-04:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay/a85a1e9326b5058f772b4fc440996b4ba136c02bad6966e471895a60f615298c/merged\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Created OCI spec for container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata/config.json" time="2025-07-12T12:37:17-04:00" level=debug msg="Got pod cgroup as " time="2025-07-12T12:37:17-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-07-12T12:37:17-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 -u db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata -p /run/user/3001/containers/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata/pidfile -n 49a038584fa1-infra --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70]" time="2025-07-12T12:37:17-04:00" level=info msg="Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for cpu: mkdir /sys/fs/cgroup/cpu/libpod_parent: permission denied" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-07-12T12:37:17-04:00" level=debug msg="Received: 26787" time="2025-07-12T12:37:17-04:00" level=info msg="Got Conmon PID as 26777" 
time="2025-07-12T12:37:17-04:00" level=debug msg="Created container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 in OCI runtime" time="2025-07-12T12:37:17-04:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-07-12T12:37:17-04:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-07-12T12:37:17-04:00" level=debug msg="Starting container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 with command [/catatonit -P]" time="2025-07-12T12:37:17-04:00" level=debug msg="Started container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70" time="2025-07-12T12:37:17-04:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/SGU47AVGSROXANDACX3GODEDPF,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/1f9869d8c46f1dd99255378c38742ad9514c5c2f608c50250883573ed9f20064/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/1f9869d8c46f1dd99255378c38742ad9514c5c2f608c50250883573ed9f20064/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c277,c351\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Mounted container \"8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/1f9869d8c46f1dd99255378c38742ad9514c5c2f608c50250883573ed9f20064/merged\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Created root filesystem for container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 at /home/podman_basic_user/.local/share/containers/storage/overlay/1f9869d8c46f1dd99255378c38742ad9514c5c2f608c50250883573ed9f20064/merged" time="2025-07-12T12:37:17-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2025-07-12T12:37:17-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-07-12T12:37:17-04:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-07-12T12:37:17-04:00" level=debug msg="Created OCI spec for container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata/config.json" time="2025-07-12T12:37:17-04:00" level=debug msg="Got pod cgroup as " time="2025-07-12T12:37:17-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-07-12T12:37:17-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 -u 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata -p /run/user/3001/containers/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata/conmon.pid 
--exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057]" time="2025-07-12T12:37:17-04:00" level=info msg="Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for cpu: mkdir /sys/fs/cgroup/cpu/conmon: permission denied" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-07-12T12:37:17-04:00" level=debug msg="Received: 26808" time="2025-07-12T12:37:17-04:00" level=info msg="Got Conmon PID as 26798" time="2025-07-12T12:37:17-04:00" level=debug msg="Created container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 in OCI runtime" time="2025-07-12T12:37:17-04:00" level=debug msg="Starting container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 with command [/bin/busybox-extras httpd -f -p 80]" time="2025-07-12T12:37:17-04:00" level=debug msg="Started container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057" time="2025-07-12T12:37:17-04:00" level=debug msg="Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-07-12T12:37:17-04:00" level=debug msg="Shutting down engines" Jul 12 12:37:17 managed-node2 platform-python[26566]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jul 12 12:37:17 managed-node2 sudo[26563]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:37:18 managed-node2 sudo[26939]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yxngcsmbouppolsnchwedyvvmqwcqmcp ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338237.9288857-15948-265909207589811/AnsiballZ_systemd.py' Jul 12 12:37:18 managed-node2 sudo[26939]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:37:18 managed-node2 platform-python[26942]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Jul 12 12:37:18 managed-node2 systemd[25539]: Reloading. 
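The sudo wrappers above export XDG_RUNTIME_DIR=/run/user/3001 before running AnsiballZ_systemd.py because user-scope systemd operations (scope=user, as in the daemon_reload task) reach the per-user manager through that directory's bus socket. A minimal equivalent of the daemon_reload=True task, assuming it runs as podman_basic_user (uid 3001 per the log) with a live user manager:

    import os, subprocess

    # Point systemctl at the target user's runtime dir, then reload the
    # *user* manager, as ansible-systemd does for scope=user.
    env = dict(os.environ, XDG_RUNTIME_DIR="/run/user/3001")
    subprocess.run(["systemctl", "--user", "daemon-reload"], env=env, check=True)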
Jul 12 12:37:18 managed-node2 sudo[26939]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:37:18 managed-node2 sudo[27076]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jxciagckwyaiwverlxxxicxpjcaamzpb ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338238.5066783-15974-208582760237043/AnsiballZ_systemd.py' Jul 12 12:37:18 managed-node2 sudo[27076]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:37:18 managed-node2 dnsmasq[26762]: listening on cni-podman1(#3): fe80::8fa:9aff:fe36:b83a%cni-podman1 Jul 12 12:37:18 managed-node2 platform-python[27079]: ansible-systemd Invoked with name= scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None Jul 12 12:37:18 managed-node2 systemd[25539]: Reloading. Jul 12 12:37:18 managed-node2 sudo[27076]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:37:19 managed-node2 sudo[27215]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aeingsotugnwsviddfzcxglibrontkpg ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338239.1567352-16007-20750914035253/AnsiballZ_systemd.py' Jul 12 12:37:19 managed-node2 sudo[27215]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:37:19 managed-node2 platform-python[27218]: ansible-systemd Invoked with name= scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None Jul 12 12:37:19 managed-node2 systemd[25539]: Created slice podman\x2dkube.slice. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:19 managed-node2 systemd[25539]: Starting A template for running K8s workloads via podman-kube-play... -- Subject: Unit UNIT has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun starting up. 
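The \x2d in "Created slice podman\x2dkube.slice" is systemd unit-name escaping: "-" separates levels of the slice hierarchy, so a literal dash inside the component podman-kube must be written as \x2d. A simplified sketch of that escaping rule (real systemd-escape(1) also handles leading dots, path mode, and more characters):

    def systemd_escape(component):
        # Keep ASCII alphanumerics, "_" and non-leading "."; C-style-escape
        # the rest, so "-" becomes \x2d. Simplified vs. systemd-escape(1).
        out = []
        for i, ch in enumerate(component):
            if ch.isalnum() or ch == "_" or (ch == "." and i > 0):
                out.append(ch)
            else:
                out.append("\\x%02x" % ord(ch))
        return "".join(out)

    print(systemd_escape("podman-kube") + ".slice")  # podman\x2dkube.slice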
Jul 12 12:37:19 managed-node2 conmon[26798]: conmon 8b812a2ec55f9de0cde0 : container 26808 exited with status 137 Jul 12 12:37:19 managed-node2 conmon[26777]: conmon db962b9f1559ffd15c96 : container 26787 exited with status 137 Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057)" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=info msg="Using sqlite as database backend" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70)" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=info msg="Using sqlite as database backend" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using graph driver overlay" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using run root /run/user/3001/containers" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using transient store: false" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: 
time="2025-07-12T12:37:19-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Cached value indicated that metacopy is not being used" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Cached value indicated that native-diff is usable" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Initializing event backend file" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=info msg="Setting parallel job count to 7" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using graph driver overlay" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using run root /run/user/3001/containers" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jul 12 12:37:19 
managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using transient store: false" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Cached value indicated that metacopy is not being used" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Cached value indicated that native-diff is usable" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Initializing event backend file" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=info msg="Setting parallel job count to 7" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman 
--root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057)" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Shutting down engines" Jul 12 12:37:19 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered disabled state Jul 12 12:37:19 managed-node2 kernel: device veth14aad36c left promiscuous mode Jul 12 12:37:19 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered disabled state Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70)" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Shutting down engines" Jul 12 12:37:19 managed-node2 podman[27224]: Pods stopped: Jul 12 12:37:19 managed-node2 podman[27224]: 49a038584fa170b91928655aabc3334b950d706880329a96a99914f26bc7c59d Jul 12 12:37:19 managed-node2 podman[27224]: Pods removed: Jul 12 12:37:19 managed-node2 podman[27224]: 49a038584fa170b91928655aabc3334b950d706880329a96a99914f26bc7c59d Jul 12 12:37:19 managed-node2 podman[27224]: Secrets removed: Jul 12 12:37:19 managed-node2 podman[27224]: Volumes removed: Jul 12 12:37:20 managed-node2 systemd[25539]: Started rootless-netns-910042d3.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
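Context for the exits above: both containers left with status 137, which is the shell convention 128 + signal number, i.e. SIGKILL(9). The kube-play service replaces the pod started earlier by the manual podman_play task, and the "Pods stopped" / "Pods removed" summary records that teardown before the new pod is created. A one-liner decoding the status:

    import signal

    status = 137
    if status > 128:  # 128+N means the process died from fatal signal N
        sig = signal.Signals(status - 128)
        print(f"killed by signal {sig.value} ({sig.name})")  # signal 9 (SIGKILL)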
Jul 12 12:37:20 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth73ffc199: link is not ready Jul 12 12:37:20 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered blocking state Jul 12 12:37:20 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered disabled state Jul 12 12:37:20 managed-node2 kernel: device veth73ffc199 entered promiscuous mode Jul 12 12:37:20 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered blocking state Jul 12 12:37:20 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered forwarding state Jul 12 12:37:20 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth73ffc199: link becomes ready Jul 12 12:37:20 managed-node2 dnsmasq[27470]: listening on cni-podman1(#3): 10.89.0.1 Jul 12 12:37:20 managed-node2 dnsmasq[27472]: started, version 2.79 cachesize 150 Jul 12 12:37:20 managed-node2 dnsmasq[27472]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify Jul 12 12:37:20 managed-node2 dnsmasq[27472]: using local addresses only for domain dns.podman Jul 12 12:37:20 managed-node2 dnsmasq[27472]: reading /etc/resolv.conf Jul 12 12:37:20 managed-node2 dnsmasq[27472]: using local addresses only for domain dns.podman Jul 12 12:37:20 managed-node2 dnsmasq[27472]: using nameserver 10.0.2.3#53 Jul 12 12:37:20 managed-node2 dnsmasq[27472]: using nameserver 10.29.169.13#53 Jul 12 12:37:20 managed-node2 dnsmasq[27472]: using nameserver 10.29.170.12#53 Jul 12 12:37:20 managed-node2 dnsmasq[27472]: using nameserver 10.2.32.1#53 Jul 12 12:37:20 managed-node2 dnsmasq[27472]: read /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Jul 12 12:37:20 managed-node2 podman[27224]: Pod: Jul 12 12:37:20 managed-node2 podman[27224]: e6000b000d16f866b55a1a19dd3fb3262c82c732a816136db57b7e8368f0a32a Jul 12 12:37:20 managed-node2 podman[27224]: Container: Jul 12 12:37:20 managed-node2 podman[27224]: 3e84611729acf9a795f4d6223da39f911f01d8e5bb78d05b15144b66878ad807 Jul 12 12:37:20 managed-node2 systemd[25539]: Started A template for running K8s workloads via podman-kube-play. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
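The dnsname CNI plugin feeds dnsmasq an addnhosts file in plain /etc/hosts syntax; "read ... addnhosts - 1 addresses" above means one container entry was loaded. A tiny parser for that format, with a hypothetical example entry (the real file lives under /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts):

    def parse_addnhosts(text):
        # /etc/hosts syntax: "IP name [name...]", "#" starts a comment.
        entries = {}
        for line in text.splitlines():
            parts = line.split("#", 1)[0].split()
            if len(parts) >= 2:
                ip, *names = parts
                for name in names:
                    entries[name] = ip
        return entries

    sample = "10.89.0.2  httpd1 httpd1.dns.podman\n"  # hypothetical entry
    print(parse_addnhosts(sample))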
Jul 12 12:37:20 managed-node2 sudo[27215]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:37:21 managed-node2 platform-python[27649]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 12 12:37:21 managed-node2 platform-python[27773]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:37:22 managed-node2 dnsmasq[27472]: listening on cni-podman1(#3): fe80::c95:b4ff:fe67:d35c%cni-podman1 Jul 12 12:37:23 managed-node2 platform-python[27898]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:37:24 managed-node2 platform-python[28022]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:37:24 managed-node2 platform-python[28145]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:37:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 12 12:37:25 managed-node2 platform-python[28435]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:37:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
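The stat tasks above request get_checksum=True with checksum_algorithm=sha1; the copy task that follows compares that checksum against the rendered source to decide whether any bytes need transferring (the copy entry below carries checksum=d1d2b757...). A minimal sketch of the comparison, not Ansible's actual implementation:

    import hashlib, pathlib

    def sha1_of(path):
        return hashlib.sha1(pathlib.Path(path).read_bytes()).hexdigest()

    def needs_update(dest, desired_content: bytes):
        # Copy only when the destination is absent or its sha1 differs.
        desired = hashlib.sha1(desired_content).hexdigest()
        try:
            return sha1_of(dest) != desired
        except FileNotFoundError:
            return True

    print(needs_update("/etc/containers/ansible-kubernetes.d/httpd2.yml", b"..."))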
Jul 12 12:37:26 managed-node2 platform-python[28558]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:37:26 managed-node2 platform-python[28681]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 12 12:37:27 managed-node2 platform-python[28780]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752338246.5734363-16367-230792965661198/source _original_basename=tmpcx3lufsl follow=False checksum=d1d2b75756121a76b51c55942528a638a8e19d00 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Jul 12 12:37:27 managed-node2 platform-python[28905]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 12 12:37:27 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice. -- Subject: Unit machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice has finished starting up. -- -- The start-up result is done. Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8210] manager: (cni-podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Jul 12 12:37:27 managed-node2 systemd-udevd[28952]: Using default interface naming scheme 'rhel-8.0'. Jul 12 12:37:27 managed-node2 systemd-udevd[28953]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. Jul 12 12:37:27 managed-node2 systemd-udevd[28953]: Could not generate persistent MAC address for vetha808c72b: No such file or directory Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8294] manager: (vetha808c72b): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Jul 12 12:37:27 managed-node2 systemd-udevd[28952]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. 
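The podman_play parameters above map almost one-to-one onto the CLI the module logs a few entries later (PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml). A hedged sketch of that translation, not the collection's actual code:

    def build_play_kube(kube_file, state="started", log_level=None):
        # state=started -> --start=true; log_level/debug -> --log-level=...
        cmd = ["/usr/bin/podman", "play", "kube"]
        cmd.append("--start=true" if state == "started" else "--start=false")
        if log_level:
            cmd.append(f"--log-level={log_level}")
        cmd.append(kube_file)
        return cmd

    print(" ".join(build_play_kube("/etc/containers/ansible-kubernetes.d/httpd2.yml",
                                   log_level="debug")))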
Jul 12 12:37:27 managed-node2 systemd-udevd[28952]: Could not generate persistent MAC address for cni-podman1: No such file or directory Jul 12 12:37:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vetha808c72b: link is not ready Jul 12 12:37:27 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered blocking state Jul 12 12:37:27 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered disabled state Jul 12 12:37:27 managed-node2 kernel: device vetha808c72b entered promiscuous mode Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8417] device (cni-podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', sys-iface-state: 'external') Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8423] device (cni-podman1): state change: unavailable -> disconnected (reason 'connection-assumed', sys-iface-state: 'external') Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8433] device (cni-podman1): Activation: starting connection 'cni-podman1' (9399044c-ebcb-4319-aff1-7a172e94e2ea) Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8434] device (cni-podman1): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'external') Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8436] device (cni-podman1): state change: prepare -> config (reason 'none', sys-iface-state: 'external') Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8438] device (cni-podman1): state change: config -> ip-config (reason 'none', sys-iface-state: 'external') Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8440] device (cni-podman1): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'external') Jul 12 12:37:27 managed-node2 dbus-daemon[601]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.5' (uid=0 pid=666 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0") Jul 12 12:37:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready Jul 12 12:37:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready Jul 12 12:37:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vetha808c72b: link becomes ready Jul 12 12:37:27 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered blocking state Jul 12 12:37:27 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered forwarding state Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8634] device (vetha808c72b): carrier: link connected Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8637] device (cni-podman1): carrier: link connected Jul 12 12:37:27 managed-node2 systemd[1]: Starting Network Manager Script Dispatcher Service... -- Subject: Unit NetworkManager-dispatcher.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has begun starting up. Jul 12 12:37:27 managed-node2 dbus-daemon[601]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher' Jul 12 12:37:27 managed-node2 systemd[1]: Started Network Manager Script Dispatcher Service. -- Subject: Unit NetworkManager-dispatcher.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has finished starting up. -- -- The start-up result is done. 
Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.9275] device (cni-podman1): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'external') Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.9277] device (cni-podman1): state change: secondaries -> activated (reason 'none', sys-iface-state: 'external') Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.9282] device (cni-podman1): Activation: successful, device activated. Jul 12 12:37:28 managed-node2 dnsmasq[29076]: listening on cni-podman1(#3): 10.89.0.1 Jul 12 12:37:28 managed-node2 dnsmasq[29080]: started, version 2.79 cachesize 150 Jul 12 12:37:28 managed-node2 dnsmasq[29080]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify Jul 12 12:37:28 managed-node2 dnsmasq[29080]: using local addresses only for domain dns.podman Jul 12 12:37:28 managed-node2 dnsmasq[29080]: reading /etc/resolv.conf Jul 12 12:37:28 managed-node2 dnsmasq[29080]: using local addresses only for domain dns.podman Jul 12 12:37:28 managed-node2 dnsmasq[29080]: using nameserver 10.29.169.13#53 Jul 12 12:37:28 managed-node2 dnsmasq[29080]: using nameserver 10.29.170.12#53 Jul 12 12:37:28 managed-node2 dnsmasq[29080]: using nameserver 10.2.32.1#53 Jul 12 12:37:28 managed-node2 dnsmasq[29080]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Jul 12 12:37:28 managed-node2 systemd[1]: Started libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope. -- Subject: Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished starting up. -- -- The start-up result is done. Jul 12 12:37:28 managed-node2 conmon[29082]: conmon 2bba312ac73f9a5e5631 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Jul 12 12:37:28 managed-node2 conmon[29082]: conmon 2bba312ac73f9a5e5631 : terminal_ctrl_fd: 13 Jul 12 12:37:28 managed-node2 conmon[29082]: conmon 2bba312ac73f9a5e5631 : winsz read side: 17, winsz write side: 18 Jul 12 12:37:28 managed-node2 systemd[1]: Started libcontainer container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07. -- Subject: Unit libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished starting up. -- -- The start-up result is done. Jul 12 12:37:28 managed-node2 conmon[29082]: conmon 2bba312ac73f9a5e5631 : container PID: 29092 Jul 12 12:37:28 managed-node2 systemd[1]: Started libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope. -- Subject: Unit libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has finished starting up. -- -- The start-up result is done. 
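Unlike the earlier rootless run, where adding conmon to a cgroupfs cgroup failed with permission denied, this root-level play uses systemd as the cgroup manager: each container lands in a transient libpod-<id>.scope and its monitor in libpod-conmon-<id>.scope, both visible above. The scope names follow directly from the container ID:

    # Scope-name pattern as it appears in the journal above.
    ctr = "2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07"
    print(f"libpod-conmon-{ctr}.scope")  # conmon monitor scope
    print(f"libpod-{ctr}.scope")         # container scope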
Jul 12 12:37:28 managed-node2 conmon[29103]: conmon dde5d521943a5269ac5d : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach}
Jul 12 12:37:28 managed-node2 conmon[29103]: conmon dde5d521943a5269ac5d : terminal_ctrl_fd: 12
Jul 12 12:37:28 managed-node2 conmon[29103]: conmon dde5d521943a5269ac5d : winsz read side: 16, winsz write side: 17
Jul 12 12:37:28 managed-node2 systemd[1]: Started libcontainer container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.
-- Subject: Unit libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has finished starting up.
--
-- The start-up result is done.
Jul 12 12:37:28 managed-node2 conmon[29103]: conmon dde5d521943a5269ac5d : container PID: 29114
Jul 12 12:37:28 managed-node2 platform-python[28905]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml
Jul 12 12:37:28 managed-node2 platform-python[28905]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd Container: dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0
Jul 12 12:37:28 managed-node2 platform-python[28905]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr:
time="2025-07-12T12:37:27-04:00" level=info msg="/usr/bin/podman filtering at log level debug"
time="2025-07-12T12:37:27-04:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)"
time="2025-07-12T12:37:27-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\""
time="2025-07-12T12:37:27-04:00" level=info msg="Using sqlite as database backend"
time="2025-07-12T12:37:27-04:00" level=debug msg="Using graph driver overlay"
time="2025-07-12T12:37:27-04:00" level=debug msg="Using graph root /var/lib/containers/storage"
time="2025-07-12T12:37:27-04:00" level=debug msg="Using run root /run/containers/storage"
time="2025-07-12T12:37:27-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod"
time="2025-07-12T12:37:27-04:00" level=debug msg="Using tmp dir /run/libpod"
time="2025-07-12T12:37:27-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes"
time="2025-07-12T12:37:27-04:00" level=debug msg="Using transient store: false"
time="2025-07-12T12:37:27-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\""
time="2025-07-12T12:37:27-04:00" level=debug msg="Cached value indicated that overlay is supported"
time="2025-07-12T12:37:27-04:00" level=debug msg="Cached value indicated that overlay is supported"
time="2025-07-12T12:37:27-04:00" level=debug msg="Cached value indicated that metacopy is being used"
time="2025-07-12T12:37:27-04:00" level=debug msg="Cached value indicated that native-diff is not being used"
time="2025-07-12T12:37:27-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled"
time="2025-07-12T12:37:27-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true"
time="2025-07-12T12:37:27-04:00" level=debug msg="Initializing event backend file"
time="2025-07-12T12:37:27-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument"
time="2025-07-12T12:37:27-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument"
time="2025-07-12T12:37:27-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument"
time="2025-07-12T12:37:27-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument"
time="2025-07-12T12:37:27-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument"
time="2025-07-12T12:37:27-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument"
time="2025-07-12T12:37:27-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument"
time="2025-07-12T12:37:27-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\""
time="2025-07-12T12:37:27-04:00" level=info msg="Setting parallel job count to 7"
time="2025-07-12T12:37:27-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-12 12:34:58.774465298 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}"
time="2025-07-12T12:37:27-04:00" level=debug msg="Successfully loaded 2 networks"
time="2025-07-12T12:37:27-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage"
time="2025-07-12T12:37:27-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }"
time="2025-07-12T12:37:27-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..."
time="2025-07-12T12:37:27-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2)" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Pod using bridge network mode" time="2025-07-12T12:37:27-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice for parent machine.slice and name libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd" time="2025-07-12T12:37:27-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice" time="2025-07-12T12:37:27-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice" time="2025-07-12T12:37:27-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:27-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." 
time="2025-07-12T12:37:27-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2)" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Inspecting image 4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Inspecting image 4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2" time="2025-07-12T12:37:27-04:00" level=debug msg="Inspecting image 4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2" time="2025-07-12T12:37:27-04:00" level=debug msg="using systemd mode: false" time="2025-07-12T12:37:27-04:00" level=debug msg="setting container name a247d85c3822-infra" time="2025-07-12T12:37:27-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Allocated lock 1 for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07" time="2025-07-12T12:37:27-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2025-07-12T12:37:27-04:00" level=debug msg="Check for idmapped mounts support " time="2025-07-12T12:37:27-04:00" level=debug msg="Created container \"2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Container \"2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\" has work directory \"/var/lib/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Container \"2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\" has run directory \"/run/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:27-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" 
..." time="2025-07-12T12:37:27-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-07-12T12:37:27-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:27-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-12T12:37:27-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:27-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-07-12T12:37:27-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:27-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-12T12:37:27-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-12T12:37:27-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-12T12:37:27-04:00" level=debug msg="using systemd mode: false" time="2025-07-12T12:37:27-04:00" level=debug msg="adding container to pod httpd2" time="2025-07-12T12:37:27-04:00" level=debug msg="setting container name httpd2-httpd2" 
time="2025-07-12T12:37:27-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-07-12T12:37:27-04:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-07-12T12:37:27-04:00" level=debug msg="Adding mount /proc" time="2025-07-12T12:37:27-04:00" level=debug msg="Adding mount /dev" time="2025-07-12T12:37:27-04:00" level=debug msg="Adding mount /dev/pts" time="2025-07-12T12:37:27-04:00" level=debug msg="Adding mount /dev/mqueue" time="2025-07-12T12:37:27-04:00" level=debug msg="Adding mount /sys" time="2025-07-12T12:37:27-04:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-07-12T12:37:27-04:00" level=debug msg="Allocated lock 2 for container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0" time="2025-07-12T12:37:27-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Created container \"dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Container \"dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\" has work directory \"/var/lib/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Container \"dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\" has run directory \"/run/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Strongconnecting node dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0" time="2025-07-12T12:37:27-04:00" level=debug msg="Pushed dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 onto stack" time="2025-07-12T12:37:27-04:00" level=debug msg="Recursing to successor node 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07" time="2025-07-12T12:37:27-04:00" level=debug msg="Strongconnecting node 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07" time="2025-07-12T12:37:27-04:00" level=debug msg="Pushed 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 onto stack" time="2025-07-12T12:37:27-04:00" level=debug msg="Finishing node 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07. Popped 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 off stack" time="2025-07-12T12:37:27-04:00" level=debug msg="Finishing node dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0. 
Popped dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 off stack" time="2025-07-12T12:37:27-04:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/BPQ67IPF3U2MS7MKOAJ6EE5AVL,upperdir=/var/lib/containers/storage/overlay/110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0/diff,workdir=/var/lib/containers/storage/overlay/110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c20,c130\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Mounted container \"2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\" at \"/var/lib/containers/storage/overlay/110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0/merged\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Created root filesystem for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 at /var/lib/containers/storage/overlay/110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0/merged" time="2025-07-12T12:37:27-04:00" level=debug msg="Made network namespace at /run/netns/netns-93660061-5819-4d54-dfec-784d954efe33 for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07" time="2025-07-12T12:37:28-04:00" level=debug msg="cni result for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:7e:63:02:ee:ed:5c Sandbox:} {Name:vetha808c72b Mac:8a:e4:ca:d3:1c:60 Sandbox:} {Name:eth0 Mac:f2:ab:50:c0:43:48 Sandbox:/run/netns/netns-93660061-5819-4d54-dfec-784d954efe33}] [{Version:4 Interface:0xc0006632b8 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}" time="2025-07-12T12:37:28-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2025-07-12T12:37:28-04:00" level=debug msg="Setting Cgroups for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 to machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice:libpod:2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07" time="2025-07-12T12:37:28-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-07-12T12:37:28-04:00" level=debug msg="Workdir \"/\" resolved to host path \"/var/lib/containers/storage/overlay/110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0/merged\"" time="2025-07-12T12:37:28-04:00" level=debug msg="Created OCI spec for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 at /var/lib/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata/config.json" time="2025-07-12T12:37:28-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice for parent machine.slice and name libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd" time="2025-07-12T12:37:28-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice" time="2025-07-12T12:37:28-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice" time="2025-07-12T12:37:28-04:00" level=debug msg="/usr/bin/conmon messages will be logged to 
syslog" time="2025-07-12T12:37:28-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 -u 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata -p /run/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata/pidfile -n a247d85c3822-infra --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07]" time="2025-07-12T12:37:28-04:00" level=info msg="Running conmon under slice machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice and unitName libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope" time="2025-07-12T12:37:28-04:00" level=debug msg="Received: 29092" time="2025-07-12T12:37:28-04:00" level=info msg="Got Conmon PID as 29082" time="2025-07-12T12:37:28-04:00" level=debug msg="Created container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 in OCI runtime" time="2025-07-12T12:37:28-04:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-07-12T12:37:28-04:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-07-12T12:37:28-04:00" level=debug msg="Starting container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 with command [/catatonit -P]" time="2025-07-12T12:37:28-04:00" level=debug msg="Started container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07" time="2025-07-12T12:37:28-04:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/UMCCOJYMJQIWGK7MOUSAJGNIT3,upperdir=/var/lib/containers/storage/overlay/4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72/diff,workdir=/var/lib/containers/storage/overlay/4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c20,c130\"" 
time="2025-07-12T12:37:28-04:00" level=debug msg="Mounted container \"dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\" at \"/var/lib/containers/storage/overlay/4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72/merged\"" time="2025-07-12T12:37:28-04:00" level=debug msg="Created root filesystem for container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 at /var/lib/containers/storage/overlay/4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72/merged" time="2025-07-12T12:37:28-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2025-07-12T12:37:28-04:00" level=debug msg="Setting Cgroups for container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 to machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice:libpod:dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0" time="2025-07-12T12:37:28-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-07-12T12:37:28-04:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-07-12T12:37:28-04:00" level=debug msg="Created OCI spec for container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 at /var/lib/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata/config.json" time="2025-07-12T12:37:28-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice for parent machine.slice and name libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd" time="2025-07-12T12:37:28-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice" time="2025-07-12T12:37:28-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice" time="2025-07-12T12:37:28-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-07-12T12:37:28-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 -u dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata -p /run/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg 
--volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0]" time="2025-07-12T12:37:28-04:00" level=info msg="Running conmon under slice machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice and unitName libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope" time="2025-07-12T12:37:28-04:00" level=debug msg="Received: 29114" time="2025-07-12T12:37:28-04:00" level=info msg="Got Conmon PID as 29103" time="2025-07-12T12:37:28-04:00" level=debug msg="Created container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 in OCI runtime" time="2025-07-12T12:37:28-04:00" level=debug msg="Starting container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 with command [/bin/busybox-extras httpd -f -p 80]" time="2025-07-12T12:37:28-04:00" level=debug msg="Started container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0" time="2025-07-12T12:37:28-04:00" level=debug msg="Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-07-12T12:37:28-04:00" level=debug msg="Shutting down engines" Jul 12 12:37:28 managed-node2 platform-python[28905]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jul 12 12:37:28 managed-node2 platform-python[29245]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Jul 12 12:37:28 managed-node2 systemd[1]: Reloading. Jul 12 12:37:29 managed-node2 platform-python[29406]: ansible-systemd Invoked with name= scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None Jul 12 12:37:29 managed-node2 systemd[1]: Reloading. Jul 12 12:37:29 managed-node2 dnsmasq[29080]: listening on cni-podman1(#3): fe80::7c63:2ff:feee:ed5c%cni-podman1 Jul 12 12:37:30 managed-node2 platform-python[29569]: ansible-systemd Invoked with name= scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None Jul 12 12:37:30 managed-node2 systemd[1]: Created slice system-podman\x2dkube.slice. -- Subject: Unit system-podman\x2dkube.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit system-podman\x2dkube.slice has finished starting up. -- -- The start-up result is done. Jul 12 12:37:30 managed-node2 systemd[1]: Starting A template for running K8s workloads via podman-kube-play... -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun starting up. 
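The instance name in podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service is the systemd-escaped path of the kube YAML file. A minimal sketch of how that name is derived and how the unit could be driven by hand, assuming the same host layout as this run:

    # systemd escaping turns '/' into '-' and escapes other characters as \xNN.
    systemd-escape /etc/containers/ansible-kubernetes.d/httpd2.yml
    # -> -etc-containers-ansible\x2dkubernetes.d-httpd2.yml
    systemctl start 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service'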
Jul 12 12:37:30 managed-node2 conmon[29082]: conmon 2bba312ac73f9a5e5631 : container 29092 exited with status 137
Jul 12 12:37:30 managed-node2 systemd[1]: libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has successfully entered the 'dead' state.
Jul 12 12:37:30 managed-node2 systemd[1]: libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope: Consumed 31ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope completed and consumed the indicated resources.
Jul 12 12:37:30 managed-node2 conmon[29103]: conmon dde5d521943a5269ac5d : container 29114 exited with status 137
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07)"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Setting custom database backend: \"sqlite\""
Jul 12 12:37:30 managed-node2 systemd[1]: libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has successfully entered the 'dead' state.
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\""
Jul 12 12:37:30 managed-node2 systemd[1]: libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope: Consumed 32ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope completed and consumed the indicated resources.
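Exit status 137 here is 128 + 9, i.e. the containers were killed with SIGKILL; that is consistent with the podman-kube unit start tearing down the pod that podman play kube had started directly (see the "Pods stopped" / "Pods removed" lines below). A one-line sanity check of the arithmetic (illustrative shell, not from this run):

    # Status 137 means death by signal 9 (SIGKILL): 137 - 128 = 9.
    kill -l $((137 - 128))   # prints KILL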
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=info msg="Using sqlite as database backend"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using graph driver overlay"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using graph root /var/lib/containers/storage"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using run root /run/containers/storage"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using tmp dir /run/libpod"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using transient store: false"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\""
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Cached value indicated that overlay is supported"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Cached value indicated that overlay is supported"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Cached value indicated that metacopy is being used"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Cached value indicated that native-diff is not being used"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Initializing event backend file"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\""
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=info msg="Setting parallel job count to 7"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0)"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Setting custom database backend: \"sqlite\""
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\""
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=info msg="Using sqlite as database backend"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using graph driver overlay"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using graph root /var/lib/containers/storage"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using run root /run/containers/storage"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using tmp dir /run/libpod"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using transient store: false"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\""
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Cached value indicated that overlay is supported"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Cached value indicated that overlay is supported"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Cached value indicated that metacopy is being used"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Cached value indicated that native-diff is not being used"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Initializing event backend file"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\""
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=info msg="Setting parallel job count to 7"
Jul 12 12:37:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay-4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72-merged.mount has successfully entered the 'dead' state.
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0)"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Shutting down engines"
Jul 12 12:37:30 managed-node2 systemd[1]: libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has successfully entered the 'dead' state.
Jul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered disabled state
Jul 12 12:37:30 managed-node2 kernel: device vetha808c72b left promiscuous mode
Jul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered disabled state
Jul 12 12:37:30 managed-node2 systemd[1]: run-netns-netns\x2d93660061\x2d5819\x2d4d54\x2ddfec\x2d784d954efe33.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit run-netns-netns\x2d93660061\x2d5819\x2d4d54\x2ddfec\x2d784d954efe33.mount has successfully entered the 'dead' state.
Jul 12 12:37:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07-userdata-shm.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay\x2dcontainers-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07-userdata-shm.mount has successfully entered the 'dead' state.
Jul 12 12:37:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay-110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0-merged.mount has successfully entered the 'dead' state.
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07)"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Shutting down engines"
Jul 12 12:37:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 12 12:37:30 managed-node2 systemd[1]: Stopping libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope.
-- Subject: Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has begun shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has begun shutting down.
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=info msg="Received shutdown signal \"terminated\", terminating!" PID=29592
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=info msg="Invoking shutdown handler \"libpod\"" PID=29592
Jul 12 12:37:30 managed-node2 systemd[1]: libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has successfully entered the 'dead' state.
Jul 12 12:37:30 managed-node2 systemd[1]: Stopped libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope.
-- Subject: Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished shutting down.
Jul 12 12:37:30 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice.
-- Subject: Unit machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice has finished shutting down.
Jul 12 12:37:30 managed-node2 systemd[1]: machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice: Consumed 212ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice completed and consumed the indicated resources.
Jul 12 12:37:30 managed-node2 podman[29576]: Pods stopped:
Jul 12 12:37:30 managed-node2 podman[29576]: a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd
Jul 12 12:37:30 managed-node2 podman[29576]: Pods removed:
Jul 12 12:37:30 managed-node2 podman[29576]: a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd
Jul 12 12:37:30 managed-node2 podman[29576]: Secrets removed:
Jul 12 12:37:30 managed-node2 podman[29576]: Volumes removed:
Jul 12 12:37:30 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice.
-- Subject: Unit machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice has finished starting up. -- -- The start-up result is done. Jul 12 12:37:30 managed-node2 systemd[1]: Started libcontainer container af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315. -- Subject: Unit libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope has finished starting up. -- -- The start-up result is done. Jul 12 12:37:30 managed-node2 systemd-udevd[29733]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. Jul 12 12:37:30 managed-node2 systemd-udevd[29733]: Could not generate persistent MAC address for vethec9deee2: No such file or directory Jul 12 12:37:30 managed-node2 NetworkManager[666]: [1752338250.7637] manager: (vethec9deee2): new Veth device (/org/freedesktop/NetworkManager/Devices/5) Jul 12 12:37:30 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vethec9deee2: link is not ready Jul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered blocking state Jul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered disabled state Jul 12 12:37:30 managed-node2 kernel: device vethec9deee2 entered promiscuous mode Jul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered blocking state Jul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered forwarding state Jul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered disabled state Jul 12 12:37:30 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethec9deee2: link becomes ready Jul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered blocking state Jul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered forwarding state Jul 12 12:37:30 managed-node2 NetworkManager[666]: [1752338250.7897] device (vethec9deee2): carrier: link connected Jul 12 12:37:30 managed-node2 NetworkManager[666]: [1752338250.7911] device (cni-podman1): carrier: link connected Jul 12 12:37:30 managed-node2 dnsmasq[29803]: listening on cni-podman1(#3): 10.89.0.1 Jul 12 12:37:30 managed-node2 dnsmasq[29803]: listening on cni-podman1(#3): fe80::7c63:2ff:feee:ed5c%cni-podman1 Jul 12 12:37:30 managed-node2 dnsmasq[29808]: started, version 2.79 cachesize 150 Jul 12 12:37:30 managed-node2 dnsmasq[29808]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify Jul 12 12:37:30 managed-node2 dnsmasq[29808]: using local addresses only for domain dns.podman Jul 12 12:37:30 managed-node2 dnsmasq[29808]: reading /etc/resolv.conf Jul 12 12:37:30 managed-node2 dnsmasq[29808]: using local addresses only for domain dns.podman Jul 12 12:37:30 managed-node2 dnsmasq[29808]: using nameserver 10.29.169.13#53 Jul 12 12:37:30 managed-node2 dnsmasq[29808]: using nameserver 10.29.170.12#53 Jul 12 12:37:30 managed-node2 dnsmasq[29808]: using nameserver 10.2.32.1#53 Jul 12 12:37:30 managed-node2 dnsmasq[29808]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Jul 12 12:37:30 
managed-node2 systemd[1]: Started libcontainer container 39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a. -- Subject: Unit libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope has finished starting up. -- -- The start-up result is done. Jul 12 12:37:31 managed-node2 systemd[1]: Started libcontainer container fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149. -- Subject: Unit libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope has finished starting up. -- -- The start-up result is done. Jul 12 12:37:31 managed-node2 podman[29576]: Pod: Jul 12 12:37:31 managed-node2 podman[29576]: ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5 Jul 12 12:37:31 managed-node2 podman[29576]: Container: Jul 12 12:37:31 managed-node2 podman[29576]: fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149 Jul 12 12:37:31 managed-node2 systemd[1]: Started A template for running K8s workloads via podman-kube-play. -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished starting up. -- -- The start-up result is done. Jul 12 12:37:31 managed-node2 platform-python[29974]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:37:33 managed-node2 platform-python[30107]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:37:34 managed-node2 platform-python[30231]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:37:34 managed-node2 platform-python[30354]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:37:36 managed-node2 platform-python[30643]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:37:36 managed-node2 platform-python[30766]: 
ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:37:37 managed-node2 platform-python[30889]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 12 12:37:37 managed-node2 platform-python[30988]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752338256.766105-16794-202828239900759/source _original_basename=tmpvj89f27p follow=False checksum=92197531821af6a866eb3c8d736aa33d00262127 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Jul 12 12:37:37 managed-node2 platform-python[31113]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 12 12:37:37 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice. -- Subject: Unit machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice has finished starting up. -- -- The start-up result is done. Jul 12 12:37:38 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jul 12 12:37:38 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vetha724e550: link is not ready Jul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered blocking state Jul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered disabled state Jul 12 12:37:38 managed-node2 kernel: device vetha724e550 entered promiscuous mode Jul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered blocking state Jul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered forwarding state Jul 12 12:37:38 managed-node2 NetworkManager[666]: [1752338258.0378] manager: (vetha724e550): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Jul 12 12:37:38 managed-node2 systemd-udevd[31161]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. 
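The flow recorded above for httpd3 is the role's standard rootful deployment path: stage the kube YAML under /etc/containers/ansible-kubernetes.d, then have containers.podman.podman_play start it. A minimal by-hand sketch of the same steps follows; the YAML's contents are whatever the test templated, and only the paths, ownership/modes, and the final command are taken from this log:

    # Stage the kube file where the role keeps it (owner root, group 0,
    # mode 0755/0644, matching the ansible-file/ansible-copy arguments above).
    install -d -o root -g 0 -m 0755 /etc/containers/ansible-kubernetes.d
    install -o root -g 0 -m 0644 httpd3.yml /etc/containers/ansible-kubernetes.d/httpd3.yml

    # podman_play state=started runs podman play kube under the hood; the
    # debug entries later in this log show the exact command line, e.g.
    #   /usr/bin/podman play kube --start=true --log-level=debug <file>
    podman play kube --start=true /etc/containers/ansible-kubernetes.d/httpd3.yml
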
Jul 12 12:37:38 managed-node2 systemd-udevd[31161]: Could not generate persistent MAC address for vetha724e550: No such file or directory Jul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered disabled state Jul 12 12:37:38 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready Jul 12 12:37:38 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready Jul 12 12:37:38 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vetha724e550: link becomes ready Jul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered blocking state Jul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered forwarding state Jul 12 12:37:38 managed-node2 NetworkManager[666]: [1752338258.0795] device (vetha724e550): carrier: link connected Jul 12 12:37:38 managed-node2 dnsmasq[29808]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 2 addresses Jul 12 12:37:38 managed-node2 systemd[1]: Started libpod-conmon-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope. -- Subject: Unit libpod-conmon-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-conmon-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has finished starting up. -- -- The start-up result is done. Jul 12 12:37:38 managed-node2 systemd[1]: Started libcontainer container 8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136. -- Subject: Unit libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has finished starting up. -- -- The start-up result is done. Jul 12 12:37:38 managed-node2 systemd[1]: Started libpod-conmon-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope. -- Subject: Unit libpod-conmon-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-conmon-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has finished starting up. -- -- The start-up result is done. Jul 12 12:37:38 managed-node2 systemd[1]: Started libcontainer container 239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8. -- Subject: Unit libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has finished starting up. -- -- The start-up result is done. Jul 12 12:37:38 managed-node2 platform-python[31394]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Jul 12 12:37:38 managed-node2 systemd[1]: Reloading. Jul 12 12:37:39 managed-node2 platform-python[31555]: ansible-systemd Invoked with name= scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None Jul 12 12:37:39 managed-node2 systemd[1]: Reloading. 
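The two ansible-systemd calls just logged (a daemon_reload=True pass, then enabled=True for a unit whose name= field is blank in the journal) wire the kube file to the podman-kube@ template; the instance actually started a moment later is podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service. A sketch of the equivalent manual commands, assuming that instance name (it is the systemd-escaped path of the kube YAML):

    # systemd-escape turns /etc/containers/ansible-kubernetes.d/httpd3.yml into
    # -etc-containers-ansible\x2dkubernetes.d-httpd3.yml, the instance string
    # visible in the journal entries that follow.
    unit="podman-kube@$(systemd-escape /etc/containers/ansible-kubernetes.d/httpd3.yml).service"
    systemctl daemon-reload
    systemctl enable "$unit"
    systemctl start "$unit"
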
Jul 12 12:37:40 managed-node2 platform-python[31710]: ansible-systemd Invoked with name= scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None Jul 12 12:37:40 managed-node2 systemd[1]: Starting A template for running K8s workloads via podman-kube-play... -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun starting up. Jul 12 12:37:40 managed-node2 systemd[1]: libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has successfully entered the 'dead' state. Jul 12 12:37:40 managed-node2 systemd[1]: libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope: Consumed 31ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope completed and consumed the indicated resources. Jul 12 12:37:40 managed-node2 systemd[1]: libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has successfully entered the 'dead' state. Jul 12 12:37:40 managed-node2 systemd[1]: libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope: Consumed 32ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope completed and consumed the indicated resources. Jul 12 12:37:40 managed-node2 dnsmasq[29808]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Jul 12 12:37:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay-719154c260667d3aa74578747f416c045e6c4537dd0a7c671adf4544cf226e68-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-719154c260667d3aa74578747f416c045e6c4537dd0a7c671adf4544cf226e68-merged.mount has successfully entered the 'dead' state. Jul 12 12:37:40 managed-node2 systemd[1]: libpod-conmon-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-conmon-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has successfully entered the 'dead' state. Jul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered disabled state Jul 12 12:37:40 managed-node2 kernel: device vetha724e550 left promiscuous mode Jul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered disabled state Jul 12 12:37:40 managed-node2 systemd[1]: run-netns-netns\x2d1bb9153f\x2df22a\x2dcc5d\x2d3c7a\x2dd87e5ee733ce.mount: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-netns-netns\x2d1bb9153f\x2df22a\x2dcc5d\x2d3c7a\x2dd87e5ee733ce.mount has successfully entered the 'dead' state. Jul 12 12:37:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136-userdata-shm.mount has successfully entered the 'dead' state. Jul 12 12:37:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay-eb2787269c2e2cd7be423803b1667df0aa39556214229872d965cd9cab309419-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-eb2787269c2e2cd7be423803b1667df0aa39556214229872d965cd9cab309419-merged.mount has successfully entered the 'dead' state. Jul 12 12:37:40 managed-node2 systemd[1]: libpod-conmon-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-conmon-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has successfully entered the 'dead' state. Jul 12 12:37:40 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice. -- Subject: Unit machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice has finished shutting down. Jul 12 12:37:40 managed-node2 systemd[1]: machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice: Consumed 199ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice completed and consumed the indicated resources. Jul 12 12:37:40 managed-node2 podman[31717]: Pods stopped: Jul 12 12:37:40 managed-node2 podman[31717]: 537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583 Jul 12 12:37:40 managed-node2 podman[31717]: Pods removed: Jul 12 12:37:40 managed-node2 podman[31717]: 537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583 Jul 12 12:37:40 managed-node2 podman[31717]: Secrets removed: Jul 12 12:37:40 managed-node2 podman[31717]: Volumes removed: Jul 12 12:37:40 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice. -- Subject: Unit machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice has finished starting up. -- -- The start-up result is done. Jul 12 12:37:40 managed-node2 systemd[1]: Started libcontainer container 7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b. 
-- Subject: Unit libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope has finished starting up. -- -- The start-up result is done. Jul 12 12:37:40 managed-node2 NetworkManager[666]: [1752338260.9491] manager: (veth3fe74d71): new Veth device (/org/freedesktop/NetworkManager/Devices/7) Jul 12 12:37:40 managed-node2 systemd-udevd[31882]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. Jul 12 12:37:40 managed-node2 systemd-udevd[31882]: Could not generate persistent MAC address for veth3fe74d71: No such file or directory Jul 12 12:37:40 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth3fe74d71: link is not ready Jul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered blocking state Jul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered disabled state Jul 12 12:37:40 managed-node2 kernel: device veth3fe74d71 entered promiscuous mode Jul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered blocking state Jul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered forwarding state Jul 12 12:37:40 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth3fe74d71: link becomes ready Jul 12 12:37:40 managed-node2 NetworkManager[666]: [1752338260.9931] device (veth3fe74d71): carrier: link connected Jul 12 12:37:41 managed-node2 dnsmasq[29808]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 2 addresses Jul 12 12:37:41 managed-node2 systemd[1]: Started libcontainer container 304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70. -- Subject: Unit libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope has finished starting up. -- -- The start-up result is done. Jul 12 12:37:41 managed-node2 systemd[1]: Started libcontainer container e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e. -- Subject: Unit libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope has finished starting up. -- -- The start-up result is done. Jul 12 12:37:41 managed-node2 systemd[1]: Started A template for running K8s workloads via podman-kube-play. -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished starting up. -- -- The start-up result is done. 
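With the service up, the test turns to verification, using the same checks an operator would run interactively: a Go-template query of each pod's state, plus an HTTP fetch of the test page through a published port. A sketch — the pod name and --format string appear verbatim in the commands logged below, while the curl line is an assumed by-hand equivalent of the ansible-uri tasks, which hit ports from the 15001-15003/tcp range the firewall task opens later in this log:

    # Expect "Running" while the podman-kube@ instance is active.
    podman pod inspect httpd3 --format '{{.State}}'

    # The uri tasks below fetch index.txt over the published ports, e.g.:
    curl -s http://localhost:15001/index.txt
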
Jul 12 12:37:41 managed-node2 podman[31717]: Pod: Jul 12 12:37:41 managed-node2 podman[31717]: ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2 Jul 12 12:37:41 managed-node2 podman[31717]: Container: Jul 12 12:37:41 managed-node2 podman[31717]: e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e Jul 12 12:37:41 managed-node2 sudo[32116]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jyrknhzkjwtoyoqfhtaoymdanzpphasy ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338261.7921255-17011-231273247445257/AnsiballZ_command.py' Jul 12 12:37:41 managed-node2 sudo[32116]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:37:42 managed-node2 platform-python[32119]: ansible-command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:37:42 managed-node2 systemd[25539]: Started podman-32128.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:42 managed-node2 sudo[32116]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:37:42 managed-node2 platform-python[32258]: ansible-command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:37:42 managed-node2 platform-python[32389]: ansible-command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:37:43 managed-node2 sudo[32528]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lhqlhpwddcodyczhbsyjvspptskrqirm ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338263.154581-17086-94452808741655/AnsiballZ_command.py' Jul 12 12:37:43 managed-node2 sudo[32528]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:37:43 managed-node2 platform-python[32531]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:37:43 managed-node2 sudo[32528]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:37:43 managed-node2 platform-python[32657]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:37:44 managed-node2 platform-python[32783]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:37:44 managed-node2 platform-python[32909]: ansible-uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] 
timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:37:45 managed-node2 platform-python[33033]: ansible-uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:37:45 managed-node2 rsyslogd[1025]: imjournal: journal files changed, reloading... [v8.2102.0-15.el8 try https://www.rsyslog.com/e/0 ] Jul 12 12:37:45 managed-node2 platform-python[33158]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr__qviri30_podman/httpd1-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:37:45 managed-node2 platform-python[33282]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr__qviri30_podman/httpd2-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:37:46 managed-node2 platform-python[33406]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr__qviri30_podman/httpd3-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:37:49 managed-node2 platform-python[33655]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:37:50 managed-node2 platform-python[33784]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:37:53 managed-node2 platform-python[33909]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:37:56 managed-node2 platform-python[34032]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 12 12:37:56 managed-node2 platform-python[34159]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False 
force=None masked=None user=None scope=None Jul 12 12:37:57 managed-node2 platform-python[34286]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 12 12:37:59 managed-node2 platform-python[34409]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:38:02 managed-node2 platform-python[34532]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:38:05 managed-node2 platform-python[34655]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:38:08 managed-node2 platform-python[34778]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 12 12:38:10 managed-node2 platform-python[34939]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 12 12:38:10 managed-node2 platform-python[35062]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 12 12:38:15 managed-node2 platform-python[35185]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jul 12 12:38:15 managed-node2 platform-python[35309]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:38:16 managed-node2 platform-python[35434]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:38:16 managed-node2 platform-python[35558]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True 
_uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:38:17 managed-node2 platform-python[35682]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:38:18 managed-node2 platform-python[35806]: ansible-command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jul 12 12:38:19 managed-node2 platform-python[35929]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:38:19 managed-node2 platform-python[36052]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:38:19 managed-node2 sudo[36175]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sfjnrnyknupgcycrjkhhnhuswecfqpyf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338299.776674-18742-47644857358508/AnsiballZ_podman_image.py' Jul 12 12:38:19 managed-node2 sudo[36175]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36180.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36189.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36197.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36205.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36213.scope. 
-- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36222.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:38:20 managed-node2 sudo[36175]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:38:21 managed-node2 platform-python[36351]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:38:21 managed-node2 platform-python[36476]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:38:22 managed-node2 platform-python[36599]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 12 12:38:22 managed-node2 platform-python[36663]: ansible-file Invoked with owner=podman_basic_user group=3001 mode=0644 dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _original_basename=tmpxhmslwri recurse=False state=file path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:38:22 managed-node2 sudo[36786]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kddzobyvwijhudrubugwpxpljmgfafhb ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338302.4767652-18857-261073031296101/AnsiballZ_podman_play.py' Jul 12 12:38:22 managed-node2 sudo[36786]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:38:22 managed-node2 platform-python[36789]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 12 12:38:22 managed-node2 systemd[25539]: Started podman-36797.scope. 
-- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:38:22 managed-node2 platform-python[36789]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jul 12 12:38:22 managed-node2 platform-python[36789]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Jul 12 12:38:22 managed-node2 platform-python[36789]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-12T12:38:22-04:00" level=info msg="/bin/podman filtering at log level debug" time="2025-07-12T12:38:22-04:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-07-12T12:38:22-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-07-12T12:38:22-04:00" level=info msg="Using sqlite as database backend" time="2025-07-12T12:38:22-04:00" level=debug msg="Using graph driver overlay" time="2025-07-12T12:38:22-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2025-07-12T12:38:22-04:00" level=debug msg="Using run root /run/user/3001/containers" time="2025-07-12T12:38:22-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2025-07-12T12:38:22-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2025-07-12T12:38:22-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2025-07-12T12:38:22-04:00" level=debug msg="Using transient store: false" time="2025-07-12T12:38:22-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-07-12T12:38:22-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-12T12:38:22-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-12T12:38:22-04:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2025-07-12T12:38:22-04:00" level=debug msg="Cached value indicated that native-diff is usable" time="2025-07-12T12:38:22-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2025-07-12T12:38:22-04:00" level=debug msg="Initializing event backend file" time="2025-07-12T12:38:22-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-07-12T12:38:22-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-07-12T12:38:22-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-07-12T12:38:22-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-07-12T12:38:22-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-07-12T12:38:22-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no 
valid executable found for OCI runtime kata: invalid argument" time="2025-07-12T12:38:22-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-07-12T12:38:22-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" time="2025-07-12T12:38:22-04:00" level=info msg="Setting parallel job count to 7" time="2025-07-12T12:38:22-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-12 12:37:16.97600692 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-07-12T12:38:22-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-07-12T12:38:22-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:38:22-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:38:22-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-07-12T12:38:22-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"" time="2025-07-12T12:38:22-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:38:22-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566)" time="2025-07-12T12:38:22-04:00" level=debug msg="exporting opaque data as blob \"sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"" time="2025-07-12T12:38:22-04:00" level=debug msg="Pod using bridge network mode" time="2025-07-12T12:38:22-04:00" level=debug msg="Got pod cgroup as /libpod_parent/36ca61264e7e11a7ce277e40b51ec55a9afdcde0d1c0d8549c5c14e962eb5314" Error: adding pod to state: name "httpd1" is in use: pod already exists time="2025-07-12T12:38:22-04:00" level=debug msg="Shutting down engines" Jul 12 12:38:22 managed-node2 platform-python[36789]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125 Jul 12 12:38:22 managed-node2 sudo[36786]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:38:23 managed-node2 platform-python[36952]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 12 12:38:24 managed-node2 platform-python[37076]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:38:25 managed-node2 platform-python[37201]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:38:26 managed-node2 platform-python[37325]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True 
modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:38:27 managed-node2 platform-python[37448]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:38:28 managed-node2 platform-python[37737]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:38:28 managed-node2 platform-python[37862]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:38:29 managed-node2 platform-python[37985]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 12 12:38:29 managed-node2 platform-python[38049]: ansible-file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd2.yml _original_basename=tmpl5_fx80_ recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd2.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:38:29 managed-node2 platform-python[38172]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 12 12:38:29 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice. 
-- Subject: Unit machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice has finished starting up. -- -- The start-up result is done. Jul 12 12:38:29 managed-node2 platform-python[38172]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 12 12:38:29 managed-node2 platform-python[38172]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Jul 12 12:38:29 managed-node2 platform-python[38172]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-12T12:38:29-04:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2025-07-12T12:38:29-04:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-07-12T12:38:29-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-07-12T12:38:29-04:00" level=info msg="Using sqlite as database backend" time="2025-07-12T12:38:29-04:00" level=debug msg="Using graph driver overlay" time="2025-07-12T12:38:29-04:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2025-07-12T12:38:29-04:00" level=debug msg="Using run root /run/containers/storage" time="2025-07-12T12:38:29-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2025-07-12T12:38:29-04:00" level=debug msg="Using tmp dir /run/libpod" time="2025-07-12T12:38:29-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2025-07-12T12:38:29-04:00" level=debug msg="Using transient store: false" time="2025-07-12T12:38:29-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-07-12T12:38:29-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-12T12:38:29-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-12T12:38:29-04:00" level=debug msg="Cached value indicated that metacopy is being used" time="2025-07-12T12:38:29-04:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2025-07-12T12:38:29-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2025-07-12T12:38:29-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2025-07-12T12:38:29-04:00" level=debug msg="Initializing event backend file" time="2025-07-12T12:38:29-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-07-12T12:38:29-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-07-12T12:38:29-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-07-12T12:38:29-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-07-12T12:38:29-04:00" level=debug 
msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-07-12T12:38:29-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-07-12T12:38:29-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-07-12T12:38:29-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" time="2025-07-12T12:38:29-04:00" level=info msg="Setting parallel job count to 7" time="2025-07-12T12:38:29-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-12 12:34:58.774465298 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-07-12T12:38:29-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-07-12T12:38:29-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:38:29-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:38:29-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-07-12T12:38:29-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"" time="2025-07-12T12:38:29-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:38:29-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2)" time="2025-07-12T12:38:29-04:00" level=debug msg="exporting opaque data as blob \"sha256:4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"" time="2025-07-12T12:38:29-04:00" level=debug msg="Pod using bridge network mode" time="2025-07-12T12:38:29-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice for parent machine.slice and name libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312" time="2025-07-12T12:38:29-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice" time="2025-07-12T12:38:29-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice" Error: adding pod to state: name "httpd2" is in use: pod already exists time="2025-07-12T12:38:29-04:00" level=debug msg="Shutting down engines" Jul 12 12:38:29 managed-node2 platform-python[38172]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125 Jul 12 12:38:31 managed-node2 platform-python[38333]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 
12:38:32 managed-node2 platform-python[38458]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:38:33 managed-node2 platform-python[38582]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:38:33 managed-node2 platform-python[38705]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:38:35 managed-node2 platform-python[38995]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:38:35 managed-node2 platform-python[39120]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:38:35 managed-node2 platform-python[39243]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 12 12:38:36 managed-node2 platform-python[39307]: ansible-file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd3.yml _original_basename=tmpb1ttu3ws recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd3.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:38:36 managed-node2 platform-python[39430]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None 
quadlet_file_mode=None quadlet_options=None Jul 12 12:38:36 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_76dbf1d1588fd5e46783829b7ad9b35366a8e6eaea8987a62e2fa617ab2d64d4.slice. -- Subject: Unit machine-libpod_pod_76dbf1d1588fd5e46783829b7ad9b35366a8e6eaea8987a62e2fa617ab2d64d4.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_76dbf1d1588fd5e46783829b7ad9b35366a8e6eaea8987a62e2fa617ab2d64d4.slice has finished starting up. -- -- The start-up result is done. Jul 12 12:38:37 managed-node2 sudo[39591]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-reodnpkicydeipvtrpezylgtxbcjdhgz ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338317.3985052-19629-279558676694792/AnsiballZ_command.py' Jul 12 12:38:37 managed-node2 sudo[39591]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:38:37 managed-node2 platform-python[39594]: ansible-command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:38:37 managed-node2 systemd[25539]: Started podman-39603.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:38:37 managed-node2 sudo[39591]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:38:38 managed-node2 platform-python[39733]: ansible-command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:38:38 managed-node2 platform-python[39864]: ansible-command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:38:38 managed-node2 sudo[39995]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jdxtrtiiowdglcaeyhyrkpgebggwzera ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338318.6450086-19659-70124315420202/AnsiballZ_command.py' Jul 12 12:38:38 managed-node2 sudo[39995]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:38:38 managed-node2 platform-python[39998]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:38:38 managed-node2 sudo[39995]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:38:39 managed-node2 platform-python[40124]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:38:39 managed-node2 platform-python[40250]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:38:40 managed-node2 platform-python[40376]: ansible-uri 
Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:38:40 managed-node2 platform-python[40500]: ansible-uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:38:40 managed-node2 platform-python[40624]: ansible-uri Invoked with url=http://localhost:15003/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:38:43 managed-node2 platform-python[40873]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:38:44 managed-node2 platform-python[41002]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:38:47 managed-node2 platform-python[41127]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jul 12 12:38:48 managed-node2 platform-python[41251]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:38:48 managed-node2 platform-python[41376]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:38:49 managed-node2 platform-python[41500]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:38:50 managed-node2 platform-python[41624]: ansible-command Invoked with _raw_params= 
warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:38:50 managed-node2 platform-python[41748]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:38:51 managed-node2 sudo[41873]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-phdckzktiusimljvxxeqcswlbkptcgje ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338330.9997387-20292-117543446474536/AnsiballZ_systemd.py' Jul 12 12:38:51 managed-node2 sudo[41873]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:38:51 managed-node2 platform-python[41876]: ansible-systemd Invoked with name= scope=user state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 12 12:38:51 managed-node2 systemd[25539]: Reloading. Jul 12 12:38:51 managed-node2 systemd[25539]: Stopping A template for running K8s workloads via podman-kube-play... -- Subject: Unit UNIT has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun shutting down. Jul 12 12:38:51 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered disabled state Jul 12 12:38:51 managed-node2 kernel: device veth73ffc199 left promiscuous mode Jul 12 12:38:51 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered disabled state Jul 12 12:38:51 managed-node2 podman[41892]: Pods stopped: Jul 12 12:38:51 managed-node2 podman[41892]: e6000b000d16f866b55a1a19dd3fb3262c82c732a816136db57b7e8368f0a32a Jul 12 12:38:51 managed-node2 podman[41892]: Pods removed: Jul 12 12:38:51 managed-node2 podman[41892]: e6000b000d16f866b55a1a19dd3fb3262c82c732a816136db57b7e8368f0a32a Jul 12 12:38:51 managed-node2 podman[41892]: Secrets removed: Jul 12 12:38:51 managed-node2 podman[41892]: Volumes removed: Jul 12 12:38:51 managed-node2 systemd[25539]: Stopped A template for running K8s workloads via podman-kube-play. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. 
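The user-scope stop above targets podman's podman-kube@.service template, whose instance name is the systemd-escaped path of the kube YAML (the system-scope units later in this log render it as podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service). A minimal sketch of reproducing such a stop by hand for the rootless httpd1 unit, assuming the user and runtime directory shown in this log:

    # Derive the escaped instance name from the kube file path
    # (systemd-escape maps "/" to "-" and escapes literal "-" as \x2d).
    unit="podman-kube@$(systemd-escape /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml).service"
    # Stop the per-user instance; XDG_RUNTIME_DIR must point at the user's
    # runtime dir (/run/user/3001 in this log) for --user to find the bus.
    sudo -u podman_basic_user XDG_RUNTIME_DIR=/run/user/3001 \
        systemctl --user stop "$unit"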
Jul 12 12:38:51 managed-node2 sudo[41873]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:38:52 managed-node2 platform-python[42165]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:38:52 managed-node2 sudo[42290]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nazeochktfswzfvlptenlckqnldzbmyv ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338332.4280062-20367-151061681885350/AnsiballZ_podman_play.py' Jul 12 12:38:52 managed-node2 sudo[42290]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jul 12 12:38:52 managed-node2 systemd[25539]: Started podman-42301.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
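The podman_play run with state=absent logs its exact CLI on the next lines. For reference, the manual equivalent for a rootless user would look roughly like this, a sketch assuming the uid 3001 runtime directory from this log:

    # Tear down everything `podman kube play` created from this file;
    # --down stops and removes the pod and its containers.
    sudo -u podman_basic_user XDG_RUNTIME_DIR=/run/user/3001 \
        podman kube play --down \
        /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml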
Jul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman kube play --down /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
Jul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped: Pods removed: Secrets removed: Volumes removed:
Jul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr:
Jul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0
Jul 12 12:38:52 managed-node2 sudo[42290]: pam_unix(sudo:session): session closed for user podman_basic_user
Jul 12 12:38:53 managed-node2 platform-python[42430]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:38:54 managed-node2 platform-python[42553]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 12 12:38:54 managed-node2 platform-python[42677]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 12 12:38:56 managed-node2 platform-python[42802]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:38:56 managed-node2 platform-python[42926]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None
Jul 12 12:38:56 managed-node2 systemd[1]: Reloading.
Jul 12 12:38:57 managed-node2 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play... -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun shutting down.
Jul 12 12:38:57 managed-node2 systemd[1]: libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope has successfully entered the 'dead' state.
Jul 12 12:38:57 managed-node2 systemd[1]: libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope: Consumed 32ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope completed and consumed the indicated resources.
Jul 12 12:38:57 managed-node2 systemd[1]: libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope: Succeeded.
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope has successfully entered the 'dead' state. Jul 12 12:38:57 managed-node2 systemd[1]: libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope: Consumed 34ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope completed and consumed the indicated resources. Jul 12 12:38:57 managed-node2 dnsmasq[29808]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Jul 12 12:38:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay-ad05b883b876cb925ec05b9fafaf9a8a37fd48a25d5d54b9615f3f4cdf0bd3b3-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-ad05b883b876cb925ec05b9fafaf9a8a37fd48a25d5d54b9615f3f4cdf0bd3b3-merged.mount has successfully entered the 'dead' state. Jul 12 12:38:57 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered disabled state Jul 12 12:38:57 managed-node2 kernel: device vethec9deee2 left promiscuous mode Jul 12 12:38:57 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered disabled state Jul 12 12:38:57 managed-node2 systemd[1]: run-netns-netns\x2d52414ca9\x2df342\x2dd1f3\x2d8cce\x2d232fb04744c1.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-netns-netns\x2d52414ca9\x2df342\x2dd1f3\x2d8cce\x2d232fb04744c1.mount has successfully entered the 'dead' state. Jul 12 12:38:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a-userdata-shm.mount has successfully entered the 'dead' state. Jul 12 12:38:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay-2d63d07bf8161ced4731534605fa38c1618204d50fc3a412c2eb303e296f3b5e-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-2d63d07bf8161ced4731534605fa38c1618204d50fc3a412c2eb303e296f3b5e-merged.mount has successfully entered the 'dead' state. Jul 12 12:38:57 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice. -- Subject: Unit machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice has finished shutting down. 
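This removes the httpd2 pod whose name collision produced the rc-125 failure earlier in the log (Error: adding pod to state: name "httpd2" is in use: pod already exists). A sketch of how a stale pod can be detected and cleared before replaying a kube file; podman pod exists and podman kube play both appear elsewhere in this log, while the force-removal step is assumed here:

    # Exit status 0 means the pod name is taken, 1 means it is free.
    if podman pod exists httpd2; then
        # Remove the stale pod and its containers so the name can be reused.
        podman pod rm -f httpd2
    fi
    podman kube play /etc/containers/ansible-kubernetes.d/httpd2.yml

At the Ansible level, containers.podman.podman_play exposes a recreate option (logged as recreate=None in the invocations above), which appears to be the supported way to get this replace-on-conflict behavior from the module itself.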
Jul 12 12:38:57 managed-node2 systemd[1]: machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice: Consumed 67ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice completed and consumed the indicated resources. Jul 12 12:38:57 managed-node2 podman[42962]: Pods stopped: Jul 12 12:38:57 managed-node2 podman[42962]: ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5 Jul 12 12:38:57 managed-node2 podman[42962]: Pods removed: Jul 12 12:38:57 managed-node2 podman[42962]: ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5 Jul 12 12:38:57 managed-node2 podman[42962]: Secrets removed: Jul 12 12:38:57 managed-node2 podman[42962]: Volumes removed: Jul 12 12:38:57 managed-node2 systemd[1]: libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope has successfully entered the 'dead' state. Jul 12 12:38:57 managed-node2 systemd[1]: libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope: Consumed 34ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope completed and consumed the indicated resources. Jul 12 12:38:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315-userdata-shm.mount has successfully entered the 'dead' state. Jul 12 12:38:57 managed-node2 dnsmasq[29808]: exiting on receipt of SIGTERM Jul 12 12:38:57 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has successfully entered the 'dead' state. Jul 12 12:38:57 managed-node2 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play. -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished shutting down. Jul 12 12:38:58 managed-node2 platform-python[43238]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:38:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay-495aa6291e9f835076198c3e1c7b8cf1909ca8b5400bdf0e5a851ba0c44119c1-merged.mount: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-495aa6291e9f835076198c3e1c7b8cf1909ca8b5400bdf0e5a851ba0c44119c1-merged.mount has successfully entered the 'dead' state. Jul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman kube play --down /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped: Pods removed: Secrets removed: Volumes removed: Jul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: Jul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jul 12 12:38:58 managed-node2 platform-python[43499]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:39:00 managed-node2 platform-python[43622]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:01 managed-node2 platform-python[43747]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:02 managed-node2 platform-python[43871]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 12 12:39:02 managed-node2 systemd[1]: Reloading. Jul 12 12:39:02 managed-node2 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play... -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun shutting down. Jul 12 12:39:02 managed-node2 systemd[1]: libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope has successfully entered the 'dead' state. Jul 12 12:39:02 managed-node2 systemd[1]: libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope: Consumed 33ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope completed and consumed the indicated resources. Jul 12 12:39:02 managed-node2 systemd[1]: libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope has successfully entered the 'dead' state. Jul 12 12:39:02 managed-node2 systemd[1]: libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope: Consumed 33ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope completed and consumed the indicated resources. Jul 12 12:39:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay-9f3109ed9592a16625c27d2daaac765746798fb973c8fcb3160951dbc3c83474-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-9f3109ed9592a16625c27d2daaac765746798fb973c8fcb3160951dbc3c83474-merged.mount has successfully entered the 'dead' state. Jul 12 12:39:02 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered disabled state Jul 12 12:39:02 managed-node2 kernel: device veth3fe74d71 left promiscuous mode Jul 12 12:39:02 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered disabled state Jul 12 12:39:02 managed-node2 systemd[1]: run-netns-netns\x2dda1f9efe\x2d2607\x2d2465\x2d3389\x2d63a80a061169.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-netns-netns\x2dda1f9efe\x2d2607\x2d2465\x2d3389\x2d63a80a061169.mount has successfully entered the 'dead' state. Jul 12 12:39:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70-userdata-shm.mount has successfully entered the 'dead' state. Jul 12 12:39:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay-b047a8f535e44a79e89943c24ecd0f40472ad6c74487b61c695a5612de0f66e9-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-b047a8f535e44a79e89943c24ecd0f40472ad6c74487b61c695a5612de0f66e9-merged.mount has successfully entered the 'dead' state. Jul 12 12:39:02 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice. 
-- Subject: Unit machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice has finished shutting down. Jul 12 12:39:02 managed-node2 systemd[1]: machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice: Consumed 66ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice completed and consumed the indicated resources. Jul 12 12:39:02 managed-node2 podman[43907]: Pods stopped: Jul 12 12:39:02 managed-node2 podman[43907]: ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2 Jul 12 12:39:02 managed-node2 podman[43907]: Pods removed: Jul 12 12:39:02 managed-node2 podman[43907]: ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2 Jul 12 12:39:02 managed-node2 podman[43907]: Secrets removed: Jul 12 12:39:02 managed-node2 podman[43907]: Volumes removed: Jul 12 12:39:02 managed-node2 systemd[1]: libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope has successfully entered the 'dead' state. Jul 12 12:39:02 managed-node2 systemd[1]: libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope: Consumed 36ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope completed and consumed the indicated resources. Jul 12 12:39:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b-userdata-shm.mount has successfully entered the 'dead' state. Jul 12 12:39:02 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has successfully entered the 'dead' state. Jul 12 12:39:02 managed-node2 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play. -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished shutting down. 
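The httpd3 teardown above follows the same three-step pattern already seen for httpd2: stop the templated service, run the kube file down, then delete the YAML. Condensed into a shell sketch using the root-scope paths from this log:

    kube=/etc/containers/ansible-kubernetes.d/httpd3.yml
    # 1. Stop the systemd wrapper so nothing restarts the pod.
    systemctl stop "podman-kube@$(systemd-escape "$kube").service"
    # 2. Remove the pod/containers the kube file created (safe if already gone).
    podman kube play --down "$kube"
    # 3. Remove the kube YAML itself.
    rm -f "$kube"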
Jul 12 12:39:03 managed-node2 platform-python[44179]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay-a2ca6c0802e64aa881912046899069f9906c3a3ecb7fd7f0e60445f767b453fb-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-a2ca6c0802e64aa881912046899069f9906c3a3ecb7fd7f0e60445f767b453fb-merged.mount has successfully entered the 'dead' state. Jul 12 12:39:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 12 12:39:03 managed-node2 platform-python[44304]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 12 12:39:03 managed-node2 platform-python[44304]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/httpd3.yml Jul 12 12:39:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Jul 12 12:39:04 managed-node2 platform-python[44440]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:39:05 managed-node2 platform-python[44563]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None Jul 12 12:39:05 managed-node2 platform-python[44687]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:05 managed-node2 sudo[44812]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-efsiwiyrgguftoqfmdsvrczsjrcdxihg ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338345.768981-21015-270850189165831/AnsiballZ_podman_container_info.py' Jul 12 12:39:05 managed-node2 sudo[44812]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:39:06 managed-node2 platform-python[44815]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None Jul 12 12:39:06 managed-node2 systemd[25539]: Started podman-44817.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:39:06 managed-node2 sudo[44812]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:39:06 managed-node2 sudo[44946]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lgxpwgdjqpsoqirugaueifldgtghyuxf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338346.3029222-21038-175701710527734/AnsiballZ_command.py' Jul 12 12:39:06 managed-node2 sudo[44946]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:39:06 managed-node2 platform-python[44949]: ansible-command Invoked with _raw_params=podman network ls -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:06 managed-node2 systemd[25539]: Started podman-44951.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
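Before linger is revoked for podman_basic_user (the loginctl disable-linger call just below), the test enumerates what is left in the user's podman store. The same audit can be run by hand; a sketch using the commands visible around this point in the log, where -n and -q are --noheading and --quiet:

    run="sudo -u podman_basic_user XDG_RUNTIME_DIR=/run/user/3001"
    $run podman ps -a            # leftover containers (assumed check)
    $run podman network ls -q    # leftover networks, as queried above
    $run podman secret ls -n -q  # leftover secrets, as queried below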
Jul 12 12:39:06 managed-node2 sudo[44946]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:39:06 managed-node2 sudo[45105]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-plsfjelikobxnwisunpzotpprpzjinoh ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338346.7950253-21068-8970032815672/AnsiballZ_command.py' Jul 12 12:39:06 managed-node2 sudo[45105]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:39:07 managed-node2 platform-python[45108]: ansible-command Invoked with _raw_params=podman secret ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:07 managed-node2 systemd[25539]: Started podman-45110.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:39:07 managed-node2 sudo[45105]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:39:07 managed-node2 platform-python[45239]: ansible-command Invoked with removes=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl disable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None Jul 12 12:39:07 managed-node2 systemd[1]: Stopping User Manager for UID 3001... -- Subject: Unit user@3001.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@3001.service has begun shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Stopping podman-pause-5a039c99.scope. -- Subject: Unit UNIT has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Stopped target Default. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Removed slice podman\x2dkube.slice. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Stopping D-Bus User Message Bus... -- Subject: Unit UNIT has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Stopped D-Bus User Message Bus. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Stopped target Basic System. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Stopped target Timers. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Stopped Mark boot as successful after the user session has run 2 minutes. 
-- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Stopped target Paths. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Stopped target Sockets. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Closed D-Bus User Message Bus Socket. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Stopped podman-pause-5a039c99.scope. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Removed slice user.slice. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Reached target Shutdown. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:39:07 managed-node2 systemd[25539]: Started Exit the Session. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:39:07 managed-node2 systemd[25539]: Reached target Exit the Session. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:39:07 managed-node2 systemd[25545]: pam_unix(systemd-user:session): session closed for user podman_basic_user Jul 12 12:39:07 managed-node2 systemd[1]: user@3001.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit user@3001.service has successfully entered the 'dead' state. Jul 12 12:39:07 managed-node2 systemd[1]: Stopped User Manager for UID 3001. -- Subject: Unit user@3001.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@3001.service has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[1]: Stopping User runtime directory /run/user/3001... -- Subject: Unit user-runtime-dir@3001.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@3001.service has begun shutting down. Jul 12 12:39:07 managed-node2 systemd[1]: run-user-3001.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-user-3001.mount has successfully entered the 'dead' state. Jul 12 12:39:07 managed-node2 systemd[1]: user-runtime-dir@3001.service: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit user-runtime-dir@3001.service has successfully entered the 'dead' state. Jul 12 12:39:07 managed-node2 systemd[1]: Stopped User runtime directory /run/user/3001. -- Subject: Unit user-runtime-dir@3001.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@3001.service has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[1]: Removed slice User Slice of UID 3001. -- Subject: Unit user-3001.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-3001.slice has finished shutting down. Jul 12 12:39:07 managed-node2 platform-python[45371]: ansible-command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:08 managed-node2 sudo[45495]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gazcsbdiijzpmpohefmybwwgcnpxuufr ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338348.4017277-21172-132611654922840/AnsiballZ_command.py' Jul 12 12:39:08 managed-node2 sudo[45495]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:39:08 managed-node2 platform-python[45498]: ansible-command Invoked with _raw_params=podman pod exists httpd1 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:08 managed-node2 sudo[45495]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:39:09 managed-node2 platform-python[45628]: ansible-command Invoked with _raw_params=podman pod exists httpd2 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:09 managed-node2 platform-python[45758]: ansible-command Invoked with _raw_params=podman pod exists httpd3 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:09 managed-node2 sudo[45888]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pizwdchaqbkhharmotzkhmtxjzrasqsn ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338349.5796022-21223-271577239366846/AnsiballZ_command.py' Jul 12 12:39:09 managed-node2 sudo[45888]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:39:09 managed-node2 platform-python[45891]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:09 managed-node2 sudo[45888]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:39:10 managed-node2 platform-python[46017]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:10 managed-node2 platform-python[46143]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True 
strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:10 managed-node2 platform-python[46269]: ansible-stat Invoked with path=/var/lib/systemd/linger/podman_basic_user follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:13 managed-node2 platform-python[46517]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:14 managed-node2 platform-python[46646]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 12 12:39:15 managed-node2 platform-python[46770]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:17 managed-node2 platform-python[46895]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jul 12 12:39:18 managed-node2 platform-python[47019]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:18 managed-node2 platform-python[47144]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:19 managed-node2 platform-python[47268]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:20 managed-node2 platform-python[47392]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:20 managed-node2 platform-python[47516]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:21 managed-node2 platform-python[47639]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:21 managed-node2 platform-python[47762]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:39:23 managed-node2 platform-python[47885]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 12 12:39:23 managed-node2 platform-python[48009]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:24 managed-node2 
platform-python[48134]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:25 managed-node2 platform-python[48258]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 12 12:39:26 managed-node2 platform-python[48385]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:26 managed-node2 platform-python[48508]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:39:27 managed-node2 platform-python[48631]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:29 managed-node2 platform-python[48756]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:29 managed-node2 platform-python[48880]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 12 12:39:30 managed-node2 platform-python[49007]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:30 managed-node2 platform-python[49130]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:39:31 managed-node2 platform-python[49253]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None Jul 12 12:39:32 managed-node2 platform-python[49377]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:32 managed-node2 platform-python[49500]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None 
remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:39:33 managed-node2 platform-python[49623]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:39:36 managed-node2 platform-python[49785]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d Jul 12 12:39:36 managed-node2 platform-python[49912]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:36 managed-node2 platform-python[50035]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:39 managed-node2 platform-python[50283]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:40 managed-node2 platform-python[50412]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 12 12:39:41 managed-node2 platform-python[50536]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:43 managed-node2 platform-python[50700]: ansible-setup Invoked with gather_subset=['all'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d Jul 12 12:39:46 managed-node2 platform-python[50852]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:47 managed-node2 platform-python[50975]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:49 managed-node2 platform-python[51223]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:50 managed-node2 platform-python[51352]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 12 12:39:50 managed-node2 platform-python[51476]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:56 managed-node2 platform-python[51640]: ansible-setup Invoked with gather_subset=['all'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d Jul 12 12:39:57 managed-node2 platform-python[51792]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:57 managed-node2 platform-python[51915]: ansible-command Invoked with 
Jul 12 12:39:57 managed-node2 platform-python[51915]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:39:58 managed-node2 platform-python[52039]: ansible-dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Jul 12 12:40:02 managed-node2 platform-python[52167]: ansible-dnf Invoked with name=['certmonger'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Jul 12 12:40:04 managed-node2 dbus-daemon[601]: [system] Reloaded configuration
Jul 12 12:40:04 managed-node2 dbus-daemon[601]: [system] Reloaded configuration
Jul 12 12:40:04 managed-node2 dbus-daemon[601]: [system] Reloaded configuration
Jul 12 12:40:04 managed-node2 dbus-daemon[601]: [system] Reloaded configuration
Jul 12 12:40:04 managed-node2 dbus-daemon[601]: [system] Reloaded configuration
Jul 12 12:40:04 managed-node2 systemd[1]: Reloading.
Jul 12 12:40:05 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.
-- Subject: Unit run-rbd3345bfad0b449fb2e69833e5ca39b9.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit run-rbd3345bfad0b449fb2e69833e5ca39b9.service has finished starting up.
--
-- The start-up result is done.
Jul 12 12:40:05 managed-node2 systemd[1]: Starting man-db-cache-update.service...
-- Subject: Unit man-db-cache-update.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit man-db-cache-update.service has begun starting up.
Jul 12 12:40:05 managed-node2 systemd[1]: Reloading.
Jul 12 12:40:05 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit man-db-cache-update.service has successfully entered the 'dead' state.
Jul 12 12:40:05 managed-node2 systemd[1]: Started man-db-cache-update.service.
-- Subject: Unit man-db-cache-update.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit man-db-cache-update.service has finished starting up.
--
-- The start-up result is done.
Jul 12 12:40:05 managed-node2 systemd[1]: run-rbd3345bfad0b449fb2e69833e5ca39b9.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit run-rbd3345bfad0b449fb2e69833e5ca39b9.service has successfully entered the 'dead' state.
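The two dnf invocations above install the certificate machinery the test needs. An equivalent standalone task, built only from the package names visible in the log:

  # Install certmonger plus the Python dependencies logged above.
  - name: Install certmonger and its Python dependencies
    ansible.builtin.dnf:
      name:
        - python3-pyasn1
        - python3-cryptography
        - python3-dbus
        - certmonger
      state: present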
Jul 12 12:40:06 managed-node2 platform-python[52799]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:40:06 managed-node2 platform-python[52922]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:40:07 managed-node2 platform-python[53045]: ansible-systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None
Jul 12 12:40:07 managed-node2 systemd[1]: Reloading.
Jul 12 12:40:07 managed-node2 systemd[1]: Starting Certificate monitoring and PKI enrollment...
-- Subject: Unit certmonger.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit certmonger.service has begun starting up.
Jul 12 12:40:07 managed-node2 systemd[1]: Started Certificate monitoring and PKI enrollment.
-- Subject: Unit certmonger.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit certmonger.service has finished starting up.
--
-- The start-up result is done.
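The file and systemd invocations above prepare certmonger: script directories with mode 0700, then the service enabled and started. A minimal sketch of the same steps, using the parameters logged:

  - name: Ensure certmonger script directories exist
    ansible.builtin.file:
      path: "/etc/certmonger/{{ item }}"
      state: directory
      owner: root
      group: root
      mode: '0700'
    loop: [pre-scripts, post-scripts]
  - name: Ensure certmonger is enabled and running
    ansible.builtin.systemd:
      name: certmonger
      state: started
      enabled: true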
Jul 12 12:40:08 managed-node2 platform-python[53238]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=# # Ansible managed # # system_role:certificate booted=True provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None
Jul 12 12:40:08 managed-node2 certmonger[53081]: 2025-07-12 12:40:08 [53081] Wrote to /var/lib/certmonger/requests/20250712164008
[the previous certmonger message repeats 27 more times while the request is processed; identical lines trimmed]
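The certificate_request invocation above carries the request parameters (name=quadlet_demo, dns=['localhost'], ca=self-sign). A hedged sketch of playbook variables that would drive such a request through the certificate role, assuming its documented certificate_requests interface:

  - name: Issue a self-signed test certificate
    include_role:
      name: fedora.linux_system_roles.certificate
    vars:
      certificate_requests:
        - name: quadlet_demo
          dns: ['localhost']
          ca: self-sign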
Jul 12 12:40:08 managed-node2 certmonger[53254]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved.
Jul 12 12:40:08 managed-node2 certmonger[53081]: 2025-07-12 12:40:08 [53081] Wrote to /var/lib/certmonger/requests/20250712164008
Jul 12 12:40:08 managed-node2 platform-python[53376]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt
Jul 12 12:40:09 managed-node2 platform-python[53499]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key
Jul 12 12:40:09 managed-node2 platform-python[53622]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt
Jul 12 12:40:10 managed-node2 platform-python[53745]: ansible-command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:40:10 managed-node2 certmonger[53081]: 2025-07-12 12:40:10 [53081] Wrote to /var/lib/certmonger/requests/20250712164008
Jul 12 12:40:10 managed-node2 platform-python[53869]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:40:10 managed-node2 platform-python[53992]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
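The getcert and file invocations above show the standard teardown for a certmonger-issued test certificate: stop tracking it, then delete the certificate and key. A minimal sketch built from the paths in the log:

  - name: Stop tracking the test certificate
    ansible.builtin.command: getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt
  - name: Remove the issued certificate and key
    ansible.builtin.file:
      path: "{{ item }}"
      state: absent
    loop:
      - /etc/pki/tls/certs/quadlet_demo.crt
      - /etc/pki/tls/private/quadlet_demo.key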
Jul 12 12:40:11 managed-node2 platform-python[54115]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:40:11 managed-node2 platform-python[54238]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 12 12:40:12 managed-node2 platform-python[54361]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 12 12:40:14 managed-node2 platform-python[54609]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:40:15 managed-node2 platform-python[54738]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 12 12:40:16 managed-node2 platform-python[54862]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 12 12:40:17 managed-node2 platform-python[54987]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 12 12:40:18 managed-node2 platform-python[55110]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 12 12:40:18 managed-node2 platform-python[55233]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:40:19 managed-node2 platform-python[55357]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Jul 12 12:40:22 managed-node2 platform-python[55480]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None
Jul 12 12:40:22 managed-node2 platform-python[55607]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None
Jul 12 12:40:23 managed-node2 platform-python[55734]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Jul 12 12:40:24 managed-node2 platform-python[55857]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
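The two firewall_lib invocations above open the demo's ports. A hedged sketch of the equivalent firewall role invocation, using only the parameters visible in the log:

  - name: Open the quadlet demo ports
    include_role:
      name: fedora.linux_system_roles.firewall
    vars:
      firewall:
        - port: 8000/tcp
          state: enabled
        - port: 9000/tcp
          state: enabled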
Jul 12 12:40:26 managed-node2 platform-python[55980]: ansible-command Invoked with _raw_params=journalctl -ex warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
TASK [Check] ******************************************************************* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:148 Saturday 12 July 2025 12:40:26 -0400 (0:00:00.427) 0:00:30.749 ********* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "-a" ], "delta": "0:00:00.080867", "end": "2025-07-12 12:40:26.848319", "rc": 0, "start": "2025-07-12 12:40:26.767452" } STDOUT: CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES TASK [Check pods] ************************************************************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:152 Saturday 12 July 2025 12:40:26 -0400 (0:00:00.432) 0:00:31.181 ********* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "pod", "ps", "--ctr-ids", "--ctr-names", "--ctr-status" ], "delta": "0:00:00.031792", "end": "2025-07-12 12:40:27.247136", "failed_when_result": false, "rc": 0, "start": "2025-07-12 12:40:27.215344" } STDOUT: POD ID NAME STATUS CREATED INFRA ID IDS NAMES STATUS TASK [Check systemd] *********************************************************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:157 Saturday 12 July 2025 12:40:27 -0400 (0:00:00.397) 0:00:31.579 ********* ok: [managed-node2] => { "changed": false, "cmd": "set -euo pipefail; systemctl list-units --all | grep quadlet", "delta": "0:00:00.011298", "end": "2025-07-12 12:40:27.628343", "failed_when_result": false, "rc": 1, "start": "2025-07-12 12:40:27.617045" } MSG: non-zero return code TASK [LS] ********************************************************************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:165 Saturday 12 July 2025 12:40:27 -0400 (0:00:00.421) 0:00:32.001 ********* ok: [managed-node2] => { "changed": false, "cmd": [ "ls", "-alrtF", "/etc/systemd/system" ], "delta": "0:00:00.004798", "end": "2025-07-12 12:40:28.045674", "failed_when_result": false, "rc": 0, "start": "2025-07-12 12:40:28.040876" } STDOUT: total 8 lrwxrwxrwx. 1 root root 9 May 11 2019 systemd-timedated.service -> /dev/null drwxr-xr-x. 4 root root 169 May 29 2024 ../ lrwxrwxrwx. 1 root root 39 May 29 2024 syslog.service -> /usr/lib/systemd/system/rsyslog.service drwxr-xr-x. 2 root root 32 May 29 2024 getty.target.wants/ lrwxrwxrwx. 1 root root 37 May 29 2024 ctrl-alt-del.target -> /usr/lib/systemd/system/reboot.target lrwxrwxrwx. 1 root root 57 May 29 2024 dbus-org.freedesktop.nm-dispatcher.service -> /usr/lib/systemd/system/NetworkManager-dispatcher.service drwxr-xr-x. 2 root root 48 May 29 2024 network-online.target.wants/ lrwxrwxrwx. 1 root root 41 May 29 2024 dbus-org.freedesktop.timedate1.service -> /usr/lib/systemd/system/timedatex.service drwxr-xr-x.
2 root root 61 May 29 2024 timers.target.wants/ drwxr-xr-x. 2 root root 31 May 29 2024 basic.target.wants/ drwxr-xr-x. 2 root root 38 May 29 2024 dev-virtio\x2dports-org.qemu.guest_agent.0.device.wants/ lrwxrwxrwx. 1 root root 41 May 29 2024 default.target -> /usr/lib/systemd/system/multi-user.target drwxr-xr-x. 2 root root 51 May 29 2024 sockets.target.wants/ drwxr-xr-x. 2 root root 31 May 29 2024 remote-fs.target.wants/ drwxr-xr-x. 2 root root 59 May 29 2024 sshd-keygen@.service.d/ drwxr-xr-x. 2 root root 119 May 29 2024 cloud-init.target.wants/ drwxr-xr-x. 2 root root 181 May 29 2024 sysinit.target.wants/ lrwxrwxrwx. 1 root root 41 Jul 12 12:34 dbus-org.fedoraproject.FirewallD1.service -> /usr/lib/systemd/system/firewalld.service drwxr-xr-x. 13 root root 4096 Jul 12 12:39 ./ drwxr-xr-x. 2 root root 4096 Jul 12 12:40 multi-user.target.wants/ TASK [Cleanup] ***************************************************************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:172 Saturday 12 July 2025 12:40:28 -0400 (0:00:00.398) 0:00:32.400 ********* TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 12 July 2025 12:40:28 -0400 (0:00:00.105) 0:00:32.505 ********* included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 12 July 2025 12:40:28 -0400 (0:00:00.079) 0:00:32.584 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 12 July 2025 12:40:28 -0400 (0:00:00.064) 0:00:32.649 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 12 July 2025 12:40:28 -0400 (0:00:00.052) 0:00:32.701 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 12 July 2025 12:40:28 -0400 (0:00:00.048) 0:00:32.750 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 12 July 2025 12:40:28 -0400 (0:00:00.039) 0:00:32.789 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: 
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 12 July 2025 12:40:28 -0400 (0:00:00.035) 0:00:32.825 ********* ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 12 July 2025 12:40:28 -0400 (0:00:00.079) 0:00:32.905 ********* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 12 July 2025 12:40:30 -0400 (0:00:01.465) 0:00:34.370 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 12 July 2025 12:40:30 -0400 (0:00:00.040) 0:00:34.410 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 12 July 2025 12:40:30 -0400 (0:00:00.061) 0:00:34.472 ********* skipping: [managed-node2] => {} TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 12 July 2025 12:40:30 -0400 (0:00:00.050) 0:00:34.523 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 12 July 2025 12:40:30 -0400 (0:00:00.037) 0:00:34.560 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" 
} TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 12 July 2025 12:40:30 -0400 (0:00:00.033) 0:00:34.593 ********* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.026698", "end": "2025-07-12 12:40:30.643989", "rc": 0, "start": "2025-07-12 12:40:30.617291" } STDOUT: podman version 4.9.4-dev TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 12 July 2025 12:40:30 -0400 (0:00:00.387) 0:00:34.981 ********* ok: [managed-node2] => { "ansible_facts": { "podman_version": "4.9.4-dev" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 12 July 2025 12:40:30 -0400 (0:00:00.056) 0:00:35.038 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 12 July 2025 12:40:30 -0400 (0:00:00.058) 0:00:35.097 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 12 July 2025 12:40:30 -0400 (0:00:00.072) 0:00:35.170 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 12 July 2025 12:40:30 -0400 (0:00:00.094) 0:00:35.264 ********* included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 12 July 2025 12:40:31 -0400 (0:00:00.060) 0:00:35.325 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 12 July 2025 12:40:31 -0400 (0:00:00.098) 0:00:35.423 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 12 July 2025 12:40:31 -0400 
(0:00:00.041) 0:00:35.465 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 12 July 2025 12:40:31 -0400 (0:00:00.041) 0:00:35.506 ********* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1752338066.7763715, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1752338037.94126, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "481438935", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 12 July 2025 12:40:31 -0400 (0:00:00.354) 0:00:35.861 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 12 July 2025 12:40:31 -0400 (0:00:00.034) 0:00:35.895 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 12 July 2025 12:40:31 -0400 (0:00:00.033) 0:00:35.929 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 12 July 2025 12:40:31 -0400 (0:00:00.032) 0:00:35.961 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 12 July 2025 12:40:31 -0400 (0:00:00.032) 0:00:35.994 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 12 July 2025 12:40:31 -0400 (0:00:00.032) 0:00:36.026 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman 
: Fail if user not in subuid file] ****** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 12 July 2025 12:40:31 -0400 (0:00:00.031) 0:00:36.058 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 12 July 2025 12:40:31 -0400 (0:00:00.039) 0:00:36.097 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 12 July 2025 12:40:31 -0400 (0:00:00.030) 0:00:36.128 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Saturday 12 July 2025 12:40:31 -0400 (0:00:00.073) 0:00:36.202 ********* included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 12 July 2025 12:40:31 -0400 (0:00:00.059) 0:00:36.261 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.030) 0:00:36.292 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.030) 0:00:36.323 ********* included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.092) 0:00:36.415 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: 
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.029) 0:00:36.445 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.030) 0:00:36.476 ********* included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.062) 0:00:36.539 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.030) 0:00:36.569 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.029) 0:00:36.599 ********* included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.066) 0:00:36.665 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.031) 0:00:36.697 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.031) 0:00:36.728 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.031) 0:00:36.760 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Saturday 12 July 2025 
12:40:32 -0400 (0:00:00.031) 0:00:36.792 ********* TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.098) 0:00:36.891 ********* included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.096) 0:00:36.987 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.040) 0:00:37.028 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.032) 0:00:37.060 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.040) 0:00:37.100 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.031) 0:00:37.132 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Run systemctl] ********************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:34 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.030) 0:00:37.162 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Require installed systemd] ********** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:41 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.031) 0:00:37.194 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:46 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.033) 0:00:37.228 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: 
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:51 Saturday 12 July 2025 12:40:32 -0400 (0:00:00.031) 0:00:37.259 ********* ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:63 Saturday 12 July 2025 12:40:35 -0400 (0:00:02.821) 0:00:40.080 ********* skipping: [managed-node2] => {} TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:68 Saturday 12 July 2025 12:40:35 -0400 (0:00:00.031) 0:00:40.112 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:73 Saturday 12 July 2025 12:40:35 -0400 (0:00:00.040) 0:00:40.152 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check which conflicting services are enabled] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 12 July 2025 12:40:35 -0400 (0:00:00.031) 0:00:40.183 ********* skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "item": "ufw", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:14 Saturday 12 July 2025 12:40:35 -0400 (0:00:00.040) 0:00:40.224 ********* skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'item': 'nftables', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "item": { "ansible_loop_var": "item", "changed": false, "item": "nftables", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'item': 'iptables', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "item": { "ansible_loop_var": "item", "changed": false, "item": "iptables", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'item': 'ufw', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "item": { "ansible_loop_var": "item", "changed": false, "item": "ufw", "skip_reason": "Conditional result was False", "skipped": true }, 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:24 Saturday 12 July 2025 12:40:35 -0400 (0:00:00.043) 0:00:40.267 ********* ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "ActiveEnterTimestamp": "Sat 2025-07-12 12:34:34 EDT", "ActiveEnterTimestampMonotonic": "334380508", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "dbus.socket polkit.service sysinit.target system.slice basic.target dbus.service", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-07-12 12:34:33 EDT", "AssertTimestampMonotonic": "333529803", "Before": "multi-user.target network-pre.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-07-12 12:34:33 EDT", "ConditionTimestampMonotonic": "333529801", "ConfigurationDirectoryMode": "0755", "Conflicts": "iptables.service shutdown.target nftables.service ipset.service ebtables.service ip6tables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "12934", "ExecMainStartTimestamp": "Sat 2025-07-12 12:34:33 EDT", "ExecMainStartTimestampMonotonic": "333544279", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", 
"IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-07-12 12:34:33 EDT", "InactiveExitTimestampMonotonic": "333544451", "InvocationID": "23956a138bd04000a793482a1bdfdae2", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "12934", "MemoryAccounting": "yes", "MemoryCurrent": "42774528", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sat 2025-07-12 12:34:34 EDT", "StateChangeTimestampMonotonic": "334380508", "StateDirectoryMode": "0755", 
"StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogTimestamp": "Sat 2025-07-12 12:34:34 EDT", "WatchdogTimestampMonotonic": "334380506", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:30 Saturday 12 July 2025 12:40:36 -0400 (0:00:00.484) 0:00:40.752 ********* ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "ActiveEnterTimestamp": "Sat 2025-07-12 12:34:34 EDT", "ActiveEnterTimestampMonotonic": "334380508", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "dbus.socket polkit.service sysinit.target system.slice basic.target dbus.service", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-07-12 12:34:33 EDT", "AssertTimestampMonotonic": "333529803", "Before": "multi-user.target network-pre.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-07-12 12:34:33 EDT", "ConditionTimestampMonotonic": "333529801", "ConfigurationDirectoryMode": "0755", "Conflicts": "iptables.service shutdown.target nftables.service ipset.service ebtables.service ip6tables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": 
"0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "12934", "ExecMainStartTimestamp": "Sat 2025-07-12 12:34:33 EDT", "ExecMainStartTimestampMonotonic": "333544279", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-07-12 12:34:33 EDT", "InactiveExitTimestampMonotonic": "333544451", "InvocationID": "23956a138bd04000a793482a1bdfdae2", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "12934", "MemoryAccounting": "yes", "MemoryCurrent": "42774528", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", 
"RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sat 2025-07-12 12:34:34 EDT", "StateChangeTimestampMonotonic": "334380508", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogTimestamp": "Sat 2025-07-12 12:34:34 EDT", "WatchdogTimestampMonotonic": "334380506", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:36 Saturday 12 July 2025 12:40:36 -0400 (0:00:00.479) 0:00:41.231 ********* ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/libexec/platform-python", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:45 Saturday 12 July 2025 12:40:37 -0400 (0:00:00.038) 0:00:41.269 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:58 Saturday 12 July 2025 12:40:37 -0400 (0:00:00.032) 0:00:41.301 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:74 Saturday 12 July 2025 12:40:37 -0400 (0:00:00.031) 0:00:41.333 ********* ok: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "8000/tcp", "state": "enabled" } } ok: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "9000/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: 
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:126 Saturday 12 July 2025 12:40:38 -0400 (0:00:01.095) 0:00:42.429 ********* skipping: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "item": { "port": "8000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "item": { "port": "9000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:137 Saturday 12 July 2025 12:40:38 -0400 (0:00:00.050) 0:00:42.480 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:146 Saturday 12 July 2025 12:40:38 -0400 (0:00:00.035) 0:00:42.516 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:152 Saturday 12 July 2025 12:40:38 -0400 (0:00:00.032) 0:00:42.548 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:161 Saturday 12 July 2025 12:40:38 -0400 (0:00:00.033) 0:00:42.582 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:172 Saturday 12 July 2025 12:40:38 -0400 (0:00:00.029) 0:00:42.611 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:178 Saturday 12 July 2025 12:40:38 -0400 (0:00:00.030) 0:00:42.642 ********* skipping: [managed-node2] => {} TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Saturday 12 July 2025 12:40:38 -0400 (0:00:00.031) 0:00:42.674 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Saturday 12 July 2025 12:40:38 -0400 (0:00:00.029) 0:00:42.704 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - 
present] ******* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Saturday 12 July 2025 12:40:38 -0400 (0:00:00.029) 0:00:42.733 ********* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Saturday 12 July 2025 12:40:38 -0400 (0:00:00.027) 0:00:42.760 ********* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Saturday 12 July 2025 12:40:38 -0400 (0:00:00.028) 0:00:42.789 ********* included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 12 July 2025 12:40:38 -0400 (0:00:00.096) 0:00:42.885 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 12 July 2025 12:40:38 -0400 (0:00:00.063) 0:00:42.949 ********* included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 12 July 2025 12:40:38 -0400 (0:00:00.058) 0:00:43.007 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 12 July 2025 12:40:38 -0400 (0:00:00.038) 0:00:43.046 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 12 July 2025 12:40:38 -0400 (0:00:00.036) 0:00:43.082 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 12 July 2025 12:40:38 
-0400 (0:00:00.040) 0:00:43.123 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 12 July 2025 12:40:38 -0400 (0:00:00.029) 0:00:43.153 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 12 July 2025 12:40:38 -0400 (0:00:00.037) 0:00:43.191 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 12 July 2025 12:40:38 -0400 (0:00:00.031) 0:00:43.222 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 12 July 2025 12:40:38 -0400 (0:00:00.029) 0:00:43.252 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 12 July 2025 12:40:39 -0400 (0:00:00.030) 0:00:43.283 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 12 July 2025 12:40:39 -0400 (0:00:00.030) 0:00:43.313 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 12 July 2025 12:40:39 -0400 (0:00:00.029) 0:00:43.342 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 12 July 2025 12:40:39 -0400 (0:00:00.030) 0:00:43.373 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 12 July 2025 12:40:39 -0400 (0:00:00.029) 0:00:43.402 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK 
[fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 12 July 2025 12:40:39 -0400 (0:00:00.039) 0:00:43.442 ********* included: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 12 July 2025 12:40:39 -0400 (0:00:00.057) 0:00:43.500 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 12 July 2025 12:40:39 -0400 (0:00:00.063) 0:00:43.564 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 12 July 2025 12:40:39 -0400 (0:00:00.031) 0:00:43.596 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 12 July 2025 12:40:39 -0400 (0:00:00.030) 0:00:43.626 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 12 July 2025 12:40:39 -0400 (0:00:00.030) 0:00:43.657 ********* fatal: [managed-node2]: FAILED! => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result" } TASK [Debug] ******************************************************************* task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:199 Saturday 12 July 2025 12:40:39 -0400 (0:00:00.035) 0:00:43.692 ********* ok: [managed-node2] => { "changed": false, "cmd": "exec 1>&2\nset -x\nset -o pipefail\nsystemctl list-units --plain -l --all | grep quadlet || :\nsystemctl list-unit-files --all | grep quadlet || :\nsystemctl list-units --plain --failed -l --all | grep quadlet || :\n", "delta": "0:00:00.377333", "end": "2025-07-12 12:40:40.080521", "rc": 0, "start": "2025-07-12 12:40:39.703188" } STDERR: + set -o pipefail + systemctl list-units --plain -l --all + grep quadlet + : + systemctl list-unit-files --all + grep quadlet + : + systemctl list-units --plain --failed -l --all + grep quadlet + : TASK [Get journald] ************************************************************ task path: /tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:209 Saturday 12 July 2025 12:40:40 -0400 (0:00:00.719) 0:00:44.412 ********* fatal: [managed-node2]: FAILED! 
=> { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.027095", "end": "2025-07-12 12:40:40.451013", "failed_when_result": true, "rc": 0, "start": "2025-07-12 12:40:40.423918" } STDOUT: -- Logs begin at Sat 2025-07-12 12:29:00 EDT, end at Sat 2025-07-12 12:40:40 EDT. -- Jul 12 12:34:28 managed-node2 platform-python[12277]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:34:28 managed-node2 platform-python[12400]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:34:29 managed-node2 platform-python[12523]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:34:29 managed-node2 platform-python[12647]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:34:32 managed-node2 platform-python[12770]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 12 12:34:33 managed-node2 platform-python[12897]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 12 12:34:33 managed-node2 systemd[1]: Reloading. Jul 12 12:34:33 managed-node2 systemd[1]: Starting firewalld - dynamic firewall daemon... -- Subject: Unit firewalld.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit firewalld.service has begun starting up. Jul 12 12:34:34 managed-node2 systemd[1]: Started firewalld - dynamic firewall daemon. -- Subject: Unit firewalld.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit firewalld.service has finished starting up. -- -- The start-up result is done. Jul 12 12:34:34 managed-node2 firewalld[12934]: WARNING: AllowZoneDrifting is enabled. This is considered an insecure configuration option. It will be removed in a future release. Please consider disabling it now. 
Jul 12 12:34:35 managed-node2 platform-python[13124]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 12 12:34:36 managed-node2 platform-python[13247]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:34:36 managed-node2 platform-python[13370]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:34:37 managed-node2 platform-python[13493]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:34:40 managed-node2 platform-python[13616]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:34:42 managed-node2 platform-python[13739]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:34:45 managed-node2 dbus-daemon[601]: [system] Reloaded configuration Jul 12 12:34:45 managed-node2 dbus-daemon[601]: [system] Reloaded configuration Jul 12 12:34:45 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. -- Subject: Unit run-rc4136976cbe94ee39dd82aa6d795790f.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit run-rc4136976cbe94ee39dd82aa6d795790f.service has finished starting up. -- -- The start-up result is done. Jul 12 12:34:45 managed-node2 systemd[1]: Starting man-db-cache-update.service... -- Subject: Unit man-db-cache-update.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has begun starting up. Jul 12 12:34:46 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit man-db-cache-update.service has successfully entered the 'dead' state. Jul 12 12:34:46 managed-node2 systemd[1]: Started man-db-cache-update.service. -- Subject: Unit man-db-cache-update.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has finished starting up. -- -- The start-up result is done. Jul 12 12:34:46 managed-node2 systemd[1]: run-rc4136976cbe94ee39dd82aa6d795790f.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-rc4136976cbe94ee39dd82aa6d795790f.service has successfully entered the 'dead' state. Jul 12 12:34:46 managed-node2 platform-python[14345]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 12 12:34:47 managed-node2 platform-python[14493]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:34:48 managed-node2 platform-python[14617]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 12 12:34:49 managed-node2 kernel: SELinux: Converting 460 SID table entries... Jul 12 12:34:49 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jul 12 12:34:49 managed-node2 kernel: SELinux: policy capability open_perms=1 Jul 12 12:34:49 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jul 12 12:34:49 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jul 12 12:34:49 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jul 12 12:34:49 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jul 12 12:34:49 managed-node2 dbus-daemon[601]: [system] Reloaded configuration Jul 12 12:34:50 managed-node2 platform-python[14744]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 12 12:34:55 managed-node2 platform-python[14867]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:34:57 managed-node2 platform-python[14992]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:34:57 managed-node2 platform-python[15115]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:34:57 managed-node2 platform-python[15238]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 12 12:34:58 managed-node2 
platform-python[15337]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752338097.5592623-9962-32376786540712/source _original_basename=tmp571i0p6f follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Jul 12 12:34:58 managed-node2 platform-python[15462]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 12 12:34:58 managed-node2 kernel: evm: overlay not supported Jul 12 12:34:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck2773103887-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-metacopy\x2dcheck2773103887-merged.mount has successfully entered the 'dead' state. Jul 12 12:34:59 managed-node2 systemd[1]: Created slice machine.slice. -- Subject: Unit machine.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine.slice has finished starting up. -- -- The start-up result is done. Jul 12 12:34:59 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice. -- Subject: Unit machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice has finished starting up. -- -- The start-up result is done. Jul 12 12:34:59 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
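The 12:34:57-12:34:59 entries trace the nopull test case: the kube file is copied to /etc/containers/ansible-kubernetes.d/nopull.yml, then a pod is created from it without being started (state=created). A sketch of the module call behind that sequence, matching the podman_play invocation logged above (within the podman role this would normally be expressed through its podman_kube_specs variable rather than by calling the module directly):

    - name: Create, but do not start, a pod from a Kubernetes YAML file
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/nopull.yml
        state: created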
Jul 12 12:35:03 managed-node2 platform-python[15788]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:35:04 managed-node2 platform-python[15917]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:35:07 managed-node2 platform-python[16042]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:35:11 managed-node2 platform-python[16165]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 12 12:35:11 managed-node2 platform-python[16292]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 12 12:35:12 managed-node2 platform-python[16419]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 12 12:35:14 managed-node2 platform-python[16542]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:35:17 managed-node2 platform-python[16665]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:35:19 managed-node2 platform-python[16788]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False 
validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:35:22 managed-node2 platform-python[16911]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 12 12:35:24 managed-node2 platform-python[17059]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 12 12:35:25 managed-node2 platform-python[17182]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 12 12:35:30 managed-node2 platform-python[17305]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:35:31 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 12 12:35:32 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 12 12:35:32 managed-node2 platform-python[17567]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:35:32 managed-node2 platform-python[17690]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:35:33 managed-node2 platform-python[17813]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 12 12:35:33 managed-node2 platform-python[17912]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752338133.0421798-11440-200522690369055/source _original_basename=tmpx4spj4rr follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Jul 12 12:35:34 managed-node2 platform-python[18037]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None 
quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 12 12:35:34 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice. -- Subject: Unit machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice has finished starting up. -- -- The start-up result is done. Jul 12 12:35:34 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 12 12:35:37 managed-node2 platform-python[18324]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:35:38 managed-node2 platform-python[18453]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:35:40 managed-node2 platform-python[18578]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:35:43 managed-node2 platform-python[18701]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 12 12:35:44 managed-node2 platform-python[18828]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 12 12:35:45 managed-node2 platform-python[18955]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 12 12:35:47 managed-node2 platform-python[19078]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:35:50 managed-node2 platform-python[19201]: ansible-dnf Invoked with name=['grubby'] 
state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:35:52 managed-node2 platform-python[19324]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:35:55 managed-node2 platform-python[19447]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 12 12:35:57 managed-node2 platform-python[19595]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 12 12:35:58 managed-node2 platform-python[19718]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 12 12:36:02 managed-node2 platform-python[19841]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:36:03 managed-node2 platform-python[19966]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:36:04 managed-node2 platform-python[20090]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 12 12:36:04 managed-node2 platform-python[20217]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:36:05 managed-node2 platform-python[20342]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 12 12:36:05 managed-node2 platform-python[20342]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/nopull.yml Jul 12 12:36:05 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice. 
-- Subject: Unit machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice has finished shutting down. Jul 12 12:36:05 managed-node2 systemd[1]: machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice: Consumed 0 CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice completed and consumed the indicated resources. Jul 12 12:36:05 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 12 12:36:05 managed-node2 platform-python[20480]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:36:06 managed-node2 platform-python[20603]: ansible-command Invoked with _raw_params=podman image prune -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:36:09 managed-node2 platform-python[20858]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:36:10 managed-node2 platform-python[20987]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:36:13 managed-node2 platform-python[21112]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:36:16 managed-node2 platform-python[21235]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 12 12:36:17 managed-node2 platform-python[21362]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 12 12:36:18 managed-node2 platform-python[21489]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] 
source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 12 12:36:19 managed-node2 platform-python[21612]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:36:22 managed-node2 platform-python[21735]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:36:25 managed-node2 platform-python[21858]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:36:28 managed-node2 platform-python[21981]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 12 12:36:30 managed-node2 platform-python[22129]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 12 12:36:31 managed-node2 platform-python[22252]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 12 12:36:35 managed-node2 platform-python[22375]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:36:36 managed-node2 platform-python[22500]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:36:37 managed-node2 platform-python[22624]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 12 12:36:37 managed-node2 platform-python[22751]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:36:38 managed-node2 platform-python[22876]: ansible-containers.podman.podman_play Invoked with state=absent 
kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 12 12:36:38 managed-node2 platform-python[22876]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/bogus.yml Jul 12 12:36:38 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice. -- Subject: Unit machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice has finished shutting down. Jul 12 12:36:38 managed-node2 systemd[1]: machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice: Consumed 0 CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice completed and consumed the indicated resources. Jul 12 12:36:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 12 12:36:38 managed-node2 platform-python[23015]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:36:39 managed-node2 platform-python[23138]: ansible-command Invoked with _raw_params=podman image prune -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:36:39 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
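The 12:36:37-12:36:39 entries show the teardown pattern the test applies to each kube play: remove the play, delete the kube file, then prune unused images. Reconstructed as tasks (task names are illustrative; the module parameters match the logged invocations):

    - name: Tear down the play created from bogus.yml
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/bogus.yml
        state: absent

    - name: Remove the kube file
      file:
        path: /etc/containers/ansible-kubernetes.d/bogus.yml
        state: absent

    - name: Prune images left behind by the removed pod
      command: podman image prune -f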
Jul 12 12:36:42 managed-node2 platform-python[23394]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:36:44 managed-node2 platform-python[23523]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:36:46 managed-node2 platform-python[23648]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:36:50 managed-node2 platform-python[23771]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 12 12:36:50 managed-node2 platform-python[23898]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 12 12:36:51 managed-node2 platform-python[24025]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 12 12:36:52 managed-node2 platform-python[24148]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:36:55 managed-node2 platform-python[24271]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:36:58 managed-node2 platform-python[24394]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False 
validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:37:01 managed-node2 platform-python[24517]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 12 12:37:03 managed-node2 platform-python[24665]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 12 12:37:04 managed-node2 platform-python[24788]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 12 12:37:08 managed-node2 platform-python[24911]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jul 12 12:37:09 managed-node2 platform-python[25036]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:37:09 managed-node2 platform-python[25161]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:37:10 managed-node2 platform-python[25285]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:37:11 managed-node2 platform-python[25409]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:37:12 managed-node2 platform-python[25533]: ansible-command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jul 12 12:37:12 managed-node2 systemd[1]: Created slice User Slice of UID 3001. -- Subject: Unit user-3001.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-3001.slice has finished starting up. -- -- The start-up result is done. Jul 12 12:37:12 managed-node2 systemd[1]: Starting User runtime directory /run/user/3001... -- Subject: Unit user-runtime-dir@3001.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@3001.service has begun starting up. Jul 12 12:37:12 managed-node2 systemd[1]: Started User runtime directory /run/user/3001. -- Subject: Unit user-runtime-dir@3001.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@3001.service has finished starting up. -- -- The start-up result is done. Jul 12 12:37:12 managed-node2 systemd[1]: Starting User Manager for UID 3001... -- Subject: Unit user@3001.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@3001.service has begun starting up. Jul 12 12:37:12 managed-node2 systemd[25539]: pam_unix(systemd-user:session): session opened for user podman_basic_user by (uid=0) Jul 12 12:37:12 managed-node2 systemd[25539]: Reached target Paths. 
-- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:12 managed-node2 systemd[25539]: Started Mark boot as successful after the user session has run 2 minutes. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:12 managed-node2 systemd[25539]: Starting D-Bus User Message Bus Socket. -- Subject: Unit UNIT has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun starting up. Jul 12 12:37:12 managed-node2 systemd[25539]: Reached target Timers. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:12 managed-node2 systemd[25539]: Listening on D-Bus User Message Bus Socket. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:12 managed-node2 systemd[25539]: Reached target Sockets. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:12 managed-node2 systemd[25539]: Reached target Basic System. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:12 managed-node2 systemd[25539]: Reached target Default. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:12 managed-node2 systemd[25539]: Startup finished in 28ms. -- Subject: User manager start-up is now complete -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The user manager instance for user 3001 has been started. All services queued -- for starting have been started. Note that other services might still be starting -- up or be started at any later time. -- -- Startup of the manager took 28872 microseconds. Jul 12 12:37:12 managed-node2 systemd[1]: Started User Manager for UID 3001. -- Subject: Unit user@3001.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@3001.service has finished starting up. -- -- The start-up result is done. 
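
The linger activation that triggers the user-3001.slice / user@3001.service start-up above corresponds to a command task guarded by a creates check, exactly as the logged parameters (creates=/var/lib/systemd/linger/podman_basic_user, _raw_params=loginctl enable-linger podman_basic_user) indicate. A minimal sketch, with an invented task name:

- name: Enable lingering so the rootless user's units outlive the login session
  command: loginctl enable-linger podman_basic_user
  args:
    creates: /var/lib/systemd/linger/podman_basic_user
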
Jul 12 12:37:13 managed-node2 platform-python[25674]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:37:13 managed-node2 platform-python[25797]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:37:13 managed-node2 sudo[25920]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-flowetcsnhyltwcqlvhwzynouopxqrjl ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338233.6175296-15753-169052845046334/AnsiballZ_podman_image.py' Jul 12 12:37:13 managed-node2 sudo[25920]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:37:14 managed-node2 systemd[25539]: Started D-Bus User Message Bus. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:14 managed-node2 systemd[25539]: Created slice user.slice. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:14 managed-node2 systemd[25539]: Started podman-25932.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:14 managed-node2 systemd[25539]: Started podman-pause-5a039c99.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:14 managed-node2 systemd[25539]: Started podman-25948.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:14 managed-node2 systemd[25539]: Started podman-25963.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
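
The sudo lines above show the pattern used to drive podman as the rootless user: become podman_basic_user and export XDG_RUNTIME_DIR=/run/user/3001 so the module reaches the user-scope podman instance. A hedged sketch of that pattern; the image name is inferred from the play-kube output further below and may not match the exact argument of this podman_image call:

- name: Pull the test image as the rootless user
  containers.podman.podman_image:
    name: quay.io/libpod/testimage:20210610   # inferred from the debug output below, not from this invocation
  become: true
  become_user: podman_basic_user
  environment:
    XDG_RUNTIME_DIR: /run/user/3001
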
Jul 12 12:37:14 managed-node2 sudo[25920]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:37:15 managed-node2 platform-python[26093]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:37:15 managed-node2 platform-python[26216]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:37:16 managed-node2 platform-python[26339]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 12 12:37:16 managed-node2 platform-python[26438]: ansible-copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752338235.9099538-15874-160470408042927/source _original_basename=tmphfu4mgeo follow=False checksum=effe6499c246b4e7daac7803b02ca2cad861ad5c backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Jul 12 12:37:16 managed-node2 sudo[26563]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ggfotomaneyfnrfutjcomejzhhvgfhsm ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338236.608599-15911-85925477640473/AnsiballZ_podman_play.py' Jul 12 12:37:16 managed-node2 sudo[26563]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:37:16 managed-node2 platform-python[26566]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 12 12:37:16 managed-node2 systemd[25539]: Started podman-26574.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:17 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6 Jul 12 12:37:17 managed-node2 systemd[25539]: Started rootless-netns-cfbb367e.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
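
The stat/file/copy/podman_play sequence above stages the kube spec into the user's ~/.config/containers/ansible-kubernetes.d and starts it. Reconstructed from the logged parameters (hedged; task names are invented and the copy src is hypothetical — the log shows only the staged ansible-tmp source):

- name: Ensure the per-user kube directory exists
  file:
    path: /home/podman_basic_user/.config/containers/ansible-kubernetes.d
    state: directory
    owner: podman_basic_user
    group: "3001"
    mode: "0755"

- name: Install the kube yaml
  copy:
    src: httpd1.yml   # hypothetical source; only the staged tmp file appears in the log
    dest: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
    owner: podman_basic_user
    group: "3001"
    mode: "0644"

- name: Start the play as the rootless user
  containers.podman.podman_play:
    kube_file: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
    state: started
    debug: true
    log_level: debug
  become: true
  become_user: podman_basic_user
  environment:
    XDG_RUNTIME_DIR: /run/user/3001
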
Jul 12 12:37:17 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Jul 12 12:37:17 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth14aad36c: link is not ready Jul 12 12:37:17 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered blocking state Jul 12 12:37:17 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered disabled state Jul 12 12:37:17 managed-node2 kernel: device veth14aad36c entered promiscuous mode Jul 12 12:37:17 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready Jul 12 12:37:17 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready Jul 12 12:37:17 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth14aad36c: link becomes ready Jul 12 12:37:17 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered blocking state Jul 12 12:37:17 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered forwarding state Jul 12 12:37:17 managed-node2 dnsmasq[26760]: listening on cni-podman1(#3): 10.89.0.1 Jul 12 12:37:17 managed-node2 dnsmasq[26762]: started, version 2.79 cachesize 150 Jul 12 12:37:17 managed-node2 dnsmasq[26762]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify Jul 12 12:37:17 managed-node2 dnsmasq[26762]: using local addresses only for domain dns.podman Jul 12 12:37:17 managed-node2 dnsmasq[26762]: reading /etc/resolv.conf Jul 12 12:37:17 managed-node2 dnsmasq[26762]: using local addresses only for domain dns.podman Jul 12 12:37:17 managed-node2 dnsmasq[26762]: using nameserver 10.0.2.3#53 Jul 12 12:37:17 managed-node2 dnsmasq[26762]: using nameserver 10.29.169.13#53 Jul 12 12:37:17 managed-node2 dnsmasq[26762]: using nameserver 10.29.170.12#53 Jul 12 12:37:17 managed-node2 dnsmasq[26762]: using nameserver 10.2.32.1#53 Jul 12 12:37:17 managed-node2 dnsmasq[26762]: read /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Jul 12 12:37:17 managed-node2 conmon[26776]: conmon db962b9f1559ffd15c96 : failed to write to /proc/self/oom_score_adj: Permission denied Jul 12 12:37:17 managed-node2 conmon[26777]: conmon db962b9f1559ffd15c96 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach} Jul 12 12:37:17 managed-node2 conmon[26777]: conmon db962b9f1559ffd15c96 : terminal_ctrl_fd: 14 Jul 12 12:37:17 managed-node2 conmon[26777]: conmon db962b9f1559ffd15c96 : winsz read side: 17, winsz write side: 18 Jul 12 12:37:17 managed-node2 conmon[26777]: conmon db962b9f1559ffd15c96 : container PID: 26787 Jul 12 12:37:17 managed-node2 conmon[26797]: conmon 8b812a2ec55f9de0cde0 : failed to write to /proc/self/oom_score_adj: Permission denied Jul 12 12:37:17 managed-node2 conmon[26798]: conmon 8b812a2ec55f9de0cde0 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Jul 12 12:37:17 managed-node2 conmon[26798]: conmon 8b812a2ec55f9de0cde0 : terminal_ctrl_fd: 13 Jul 12 12:37:17 managed-node2 conmon[26798]: conmon 8b812a2ec55f9de0cde0 : winsz read side: 16, winsz write side: 17 Jul 12 12:37:17 managed-node2 conmon[26798]: conmon 8b812a2ec55f9de0cde0 : container PID: 26808 Jul 12 12:37:17 managed-node2 platform-python[26566]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jul 12 12:37:17 managed-node2 platform-python[26566]: 
ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 49a038584fa170b91928655aabc3334b950d706880329a96a99914f26bc7c59d Container: 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 Jul 12 12:37:17 managed-node2 platform-python[26566]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-12T12:37:16-04:00" level=info msg="/bin/podman filtering at log level debug" time="2025-07-12T12:37:16-04:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-07-12T12:37:16-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-07-12T12:37:16-04:00" level=info msg="Using sqlite as database backend" time="2025-07-12T12:37:16-04:00" level=debug msg="Using graph driver overlay" time="2025-07-12T12:37:16-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2025-07-12T12:37:16-04:00" level=debug msg="Using run root /run/user/3001/containers" time="2025-07-12T12:37:16-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2025-07-12T12:37:16-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2025-07-12T12:37:16-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2025-07-12T12:37:16-04:00" level=debug msg="Using transient store: false" time="2025-07-12T12:37:16-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-07-12T12:37:16-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-12T12:37:16-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-12T12:37:16-04:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2025-07-12T12:37:16-04:00" level=debug msg="Cached value indicated that native-diff is usable" time="2025-07-12T12:37:16-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2025-07-12T12:37:16-04:00" level=debug msg="Initializing event backend file" time="2025-07-12T12:37:16-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-07-12T12:37:16-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-07-12T12:37:16-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-07-12T12:37:16-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-07-12T12:37:16-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-07-12T12:37:16-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-07-12T12:37:16-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-07-12T12:37:16-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" 
time="2025-07-12T12:37:16-04:00" level=info msg="Setting parallel job count to 7" time="2025-07-12T12:37:16-04:00" level=debug msg="Successfully loaded 1 networks" time="2025-07-12T12:37:16-04:00" level=debug msg="found free device name cni-podman1" time="2025-07-12T12:37:16-04:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2025-07-12T12:37:16-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-12 12:37:16.97600692 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-07-12T12:37:16-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-07-12T12:37:16-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:37:16-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:16-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-07-12T12:37:16-04:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" does not resolve to an image ID" time="2025-07-12T12:37:16-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-07-12T12:37:16-04:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" does not resolve to an image ID" time="2025-07-12T12:37:16-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." 
time="2025-07-12T12:37:16-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:16-04:00" level=debug msg="FROM \"scratch\"" time="2025-07-12T12:37:16-04:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2025-07-12T12:37:16-04:00" level=debug msg="Check for idmapped mounts support " time="2025-07-12T12:37:16-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:16-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:17-04:00" level=debug msg="overlay: test mount indicated that volatile is being used" time="2025-07-12T12:37:17-04:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263/work,userxattr,volatile,context=\"system_u:object_r:container_file_t:s0:c99,c874\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Container ID: 74b081262df1d810c422dbcbe1db2f5a2adc384492d57cda98cbd9e90ab37ee1" time="2025-07-12T12:37:17-04:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}" time="2025-07-12T12:37:17-04:00" level=debug msg="COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\"\", Src:[]string{\"/usr/libexec/podman/catatonit\"}, Dest:\"/catatonit\", Download:false, Chown:\"\", Chmod:\"\", Checksum:\"\", Files:[]imagebuilder.File(nil)}" time="2025-07-12T12:37:17-04:00" level=debug msg="added content file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd" time="2025-07-12T12:37:17-04:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT [\"/catatonit\", \"-P\"]}" time="2025-07-12T12:37:17-04:00" level=debug msg="COMMIT localhost/podman-pause:4.9.4-dev-1708535009" time="2025-07-12T12:37:17-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\"" time="2025-07-12T12:37:17-04:00" level=debug msg="COMMIT \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\"" time="2025-07-12T12:37:17-04:00" level=debug msg="committing image with reference \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" is allowed by policy" time="2025-07-12T12:37:17-04:00" level=debug msg="layer list: [\"221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263\"]" time="2025-07-12T12:37:17-04:00" level=debug msg="using \"/var/tmp/buildah1838958819\" to hold temporary data" time="2025-07-12T12:37:17-04:00" level=debug msg="Tar with 
options on /home/podman_basic_user/.local/share/containers/storage/overlay/221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263/diff" time="2025-07-12T12:37:17-04:00" level=debug msg="layer \"221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263\" size is 767488 bytes, uncompressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690, possibly-compressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690" time="2025-07-12T12:37:17-04:00" level=debug msg="OCIv1 config = {\"created\":\"2025-07-12T16:37:17.118933835Z\",\"architecture\":\"amd64\",\"os\":\"linux\",\"config\":{\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Entrypoint\":[\"/catatonit\",\"-P\"],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"]},\"history\":[{\"created\":\"2025-07-12T16:37:17.118347731Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-07-12T16:37:17.122165868Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-07-12T12:37:17-04:00" level=debug msg="OCIv1 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\":\"application/vnd.oci.image.config.v1+json\",\"digest\":\"sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\",\"size\":668},\"layers\":[{\"mediaType\":\"application/vnd.oci.image.layer.v1.tar\",\"digest\":\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\",\"size\":767488}],\"annotations\":{\"org.opencontainers.image.base.digest\":\"\",\"org.opencontainers.image.base.name\":\"\"}}" time="2025-07-12T12:37:17-04:00" level=debug msg="Docker v2s2 config = {\"created\":\"2025-07-12T16:37:17.118933835Z\",\"container\":\"74b081262df1d810c422dbcbe1db2f5a2adc384492d57cda98cbd9e90ab37ee1\",\"container_config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":[],\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":[],\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"architecture\":\"amd64\",\"os\":\"linux\",\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"]},\"history\":[{\"created\":\"2025-07-12T16:37:17.118347731Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-07-12T16:37:17.122165868Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-07-12T12:37:17-04:00" level=debug msg="Docker v2s2 manifest = 
{\"schemaVersion\":2,\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\"config\":{\"mediaType\":\"application/vnd.docker.container.image.v1+json\",\"size\":1342,\"digest\":\"sha256:706c7e5b14dda8248bcff3ec5c250761bd8f764535609aa9365ce9e4b43361c2\"},\"layers\":[{\"mediaType\":\"application/vnd.docker.image.rootfs.diff.tar\",\"size\":767488,\"digest\":\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"}]}" time="2025-07-12T12:37:17-04:00" level=debug msg="Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite" time="2025-07-12T12:37:17-04:00" level=debug msg="IsRunningImageAllowed for image containers-storage:" time="2025-07-12T12:37:17-04:00" level=debug msg=" Using transport \"containers-storage\" policy section " time="2025-07-12T12:37:17-04:00" level=debug msg=" Requirement 0: allowed" time="2025-07-12T12:37:17-04:00" level=debug msg="Overall: allowed" time="2025-07-12T12:37:17-04:00" level=debug msg="start reading config" time="2025-07-12T12:37:17-04:00" level=debug msg="finished reading config" time="2025-07-12T12:37:17-04:00" level=debug msg="Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]" time="2025-07-12T12:37:17-04:00" level=debug msg="... will first try using the original manifest unmodified" time="2025-07-12T12:37:17-04:00" level=debug msg="Checking if we can reuse blob sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690: general substitution = true, compression for MIME type \"application/vnd.oci.image.layer.v1.tar\" = true" time="2025-07-12T12:37:17-04:00" level=debug msg="reading layer \"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"" time="2025-07-12T12:37:17-04:00" level=debug msg="No compression detected" time="2025-07-12T12:37:17-04:00" level=debug msg="Using original blob without modification" time="2025-07-12T12:37:17-04:00" level=debug msg="Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690/diff" time="2025-07-12T12:37:17-04:00" level=debug msg="finished reading layer \"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"" time="2025-07-12T12:37:17-04:00" level=debug msg="No compression detected" time="2025-07-12T12:37:17-04:00" level=debug msg="Compression change for blob sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566 (\"application/vnd.oci.image.config.v1+json\") not supported" time="2025-07-12T12:37:17-04:00" level=debug msg="Using original blob without modification" time="2025-07-12T12:37:17-04:00" level=debug msg="setting image creation date to 2025-07-12 16:37:17.118933835 +0000 UTC" time="2025-07-12T12:37:17-04:00" level=debug msg="created new image ID \"948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\" with metadata \"{}\"" time="2025-07-12T12:37:17-04:00" level=debug msg="added name \"localhost/podman-pause:4.9.4-dev-1708535009\" to image \"948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"" time="2025-07-12T12:37:17-04:00" level=debug msg="parsed reference into 
\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\"" time="2025-07-12T12:37:17-04:00" level=debug msg="printing final image id \"948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Pod using bridge network mode" time="2025-07-12T12:37:17-04:00" level=debug msg="Got pod cgroup as /libpod_parent/49a038584fa170b91928655aabc3334b950d706880329a96a99914f26bc7c59d" time="2025-07-12T12:37:17-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:37:17-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:17-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-07-12T12:37:17-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:37:17-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566)" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Inspecting image 948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Inspecting image 948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566" time="2025-07-12T12:37:17-04:00" level=debug msg="Inspecting image 948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566" time="2025-07-12T12:37:17-04:00" level=debug msg="using systemd mode: false" time="2025-07-12T12:37:17-04:00" level=debug msg="setting container name 49a038584fa1-infra" time="2025-07-12T12:37:17-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Allocated lock 1 for container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70" time="2025-07-12T12:37:17-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Created container \"db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Container \"db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\" has work directory 
\"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Container \"db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\" has run directory \"/run/user/3001/containers/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:17-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:17-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-12T12:37:17-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:17-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-07-12T12:37:17-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:17-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:17-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-12T12:37:17-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:17-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:17-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:17-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-07-12T12:37:17-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:17-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:17-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:17-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-12T12:37:17-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:17-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-12T12:37:17-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-12T12:37:17-04:00" level=debug msg="using systemd mode: false" time="2025-07-12T12:37:17-04:00" level=debug msg="adding container to pod httpd1" time="2025-07-12T12:37:17-04:00" level=debug msg="setting container name httpd1-httpd1" 
time="2025-07-12T12:37:17-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-07-12T12:37:17-04:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-07-12T12:37:17-04:00" level=debug msg="Adding mount /proc" time="2025-07-12T12:37:17-04:00" level=debug msg="Adding mount /dev" time="2025-07-12T12:37:17-04:00" level=debug msg="Adding mount /dev/pts" time="2025-07-12T12:37:17-04:00" level=debug msg="Adding mount /dev/mqueue" time="2025-07-12T12:37:17-04:00" level=debug msg="Adding mount /sys" time="2025-07-12T12:37:17-04:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-07-12T12:37:17-04:00" level=debug msg="Allocated lock 2 for container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057" time="2025-07-12T12:37:17-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Created container \"8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Container \"8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Container \"8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\" has run directory \"/run/user/3001/containers/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Strongconnecting node db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70" time="2025-07-12T12:37:17-04:00" level=debug msg="Pushed db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 onto stack" time="2025-07-12T12:37:17-04:00" level=debug msg="Finishing node db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70. Popped db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 off stack" time="2025-07-12T12:37:17-04:00" level=debug msg="Strongconnecting node 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057" time="2025-07-12T12:37:17-04:00" level=debug msg="Pushed 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 onto stack" time="2025-07-12T12:37:17-04:00" level=debug msg="Finishing node 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057. 
Popped 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 off stack" time="2025-07-12T12:37:17-04:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/OM4I4NAT7NV6G6FUUDQFTEASSZ,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/a85a1e9326b5058f772b4fc440996b4ba136c02bad6966e471895a60f615298c/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/a85a1e9326b5058f772b4fc440996b4ba136c02bad6966e471895a60f615298c/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c277,c351\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Mounted container \"db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/a85a1e9326b5058f772b4fc440996b4ba136c02bad6966e471895a60f615298c/merged\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Created root filesystem for container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 at /home/podman_basic_user/.local/share/containers/storage/overlay/a85a1e9326b5058f772b4fc440996b4ba136c02bad6966e471895a60f615298c/merged" time="2025-07-12T12:37:17-04:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-d0ac84ca-ca87-3466-1642-2cff38531036 for container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70" time="2025-07-12T12:37:17-04:00" level=debug msg="creating rootless network namespace with name \"rootless-netns-d22c9f230d0691b8f418\"" time="2025-07-12T12:37:17-04:00" level=debug msg="slirp4netns command: /bin/slirp4netns --disable-host-loopback --mtu=65520 --enable-sandbox --enable-seccomp --enable-ipv6 -c -r 3 --netns-type=path /run/user/3001/netns/rootless-netns-d22c9f230d0691b8f418 tap0" time="2025-07-12T12:37:17-04:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" time="2025-07-12T12:37:17-04:00" level=debug msg="cni result for container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:0a:fa:9a:36:b8:3a Sandbox:} {Name:veth14aad36c Mac:82:8b:99:b5:f7:b0 Sandbox:} {Name:eth0 Mac:b6:e7:40:6d:da:9c Sandbox:/run/user/3001/netns/netns-d0ac84ca-ca87-3466-1642-2cff38531036}] [{Version:4 Interface:0xc0008e9188 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}" time="2025-07-12T12:37:17-04:00" level=debug msg="rootlessport: time=\"2025-07-12T12:37:17-04:00\" level=info msg=\"Starting parent driver\"\ntime=\"2025-07-12T12:37:17-04:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport4142254753/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport4142254753/.bp.sock]\"\ntime=\"2025-07-12T12:37:17-04:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2025-07-12T12:37:17-04:00" level=debug msg="rootlessport: time=\"2025-07-12T12:37:17-04:00\" level=info msg=\"Waiting for initComplete\"\n" time="2025-07-12T12:37:17-04:00" level=debug msg="rootlessport: time=\"2025-07-12T12:37:17-04:00\" level=info msg=\"initComplete is closed; parent and child established the communication channel\"\n" time="2025-07-12T12:37:17-04:00" level=debug msg="rootlessport: time=\"2025-07-12T12:37:17-04:00\" level=info msg=\"Exposing ports [{ 80 15001 1 tcp}]\"\n" time="2025-07-12T12:37:17-04:00" level=debug 
msg="rootlessport is ready" time="2025-07-12T12:37:17-04:00" level=debug msg="rootlessport: time=\"2025-07-12T12:37:17-04:00\" level=info msg=Ready\n" time="2025-07-12T12:37:17-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2025-07-12T12:37:17-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-07-12T12:37:17-04:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay/a85a1e9326b5058f772b4fc440996b4ba136c02bad6966e471895a60f615298c/merged\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Created OCI spec for container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata/config.json" time="2025-07-12T12:37:17-04:00" level=debug msg="Got pod cgroup as " time="2025-07-12T12:37:17-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-07-12T12:37:17-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 -u db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata -p /run/user/3001/containers/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata/pidfile -n 49a038584fa1-infra --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70]" time="2025-07-12T12:37:17-04:00" level=info msg="Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for cpu: mkdir /sys/fs/cgroup/cpu/libpod_parent: permission denied" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-07-12T12:37:17-04:00" level=debug msg="Received: 26787" time="2025-07-12T12:37:17-04:00" level=info msg="Got Conmon PID as 26777" 
time="2025-07-12T12:37:17-04:00" level=debug msg="Created container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 in OCI runtime" time="2025-07-12T12:37:17-04:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-07-12T12:37:17-04:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-07-12T12:37:17-04:00" level=debug msg="Starting container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 with command [/catatonit -P]" time="2025-07-12T12:37:17-04:00" level=debug msg="Started container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70" time="2025-07-12T12:37:17-04:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/SGU47AVGSROXANDACX3GODEDPF,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/1f9869d8c46f1dd99255378c38742ad9514c5c2f608c50250883573ed9f20064/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/1f9869d8c46f1dd99255378c38742ad9514c5c2f608c50250883573ed9f20064/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c277,c351\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Mounted container \"8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/1f9869d8c46f1dd99255378c38742ad9514c5c2f608c50250883573ed9f20064/merged\"" time="2025-07-12T12:37:17-04:00" level=debug msg="Created root filesystem for container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 at /home/podman_basic_user/.local/share/containers/storage/overlay/1f9869d8c46f1dd99255378c38742ad9514c5c2f608c50250883573ed9f20064/merged" time="2025-07-12T12:37:17-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2025-07-12T12:37:17-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-07-12T12:37:17-04:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-07-12T12:37:17-04:00" level=debug msg="Created OCI spec for container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata/config.json" time="2025-07-12T12:37:17-04:00" level=debug msg="Got pod cgroup as " time="2025-07-12T12:37:17-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-07-12T12:37:17-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 -u 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata -p /run/user/3001/containers/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata/conmon.pid 
--exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057]" time="2025-07-12T12:37:17-04:00" level=info msg="Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for cpu: mkdir /sys/fs/cgroup/cpu/conmon: permission denied" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-07-12T12:37:17-04:00" level=debug msg="Received: 26808" time="2025-07-12T12:37:17-04:00" level=info msg="Got Conmon PID as 26798" time="2025-07-12T12:37:17-04:00" level=debug msg="Created container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 in OCI runtime" time="2025-07-12T12:37:17-04:00" level=debug msg="Starting container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 with command [/bin/busybox-extras httpd -f -p 80]" time="2025-07-12T12:37:17-04:00" level=debug msg="Started container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057" time="2025-07-12T12:37:17-04:00" level=debug msg="Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-07-12T12:37:17-04:00" level=debug msg="Shutting down engines" Jul 12 12:37:17 managed-node2 platform-python[26566]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jul 12 12:37:17 managed-node2 sudo[26563]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:37:18 managed-node2 sudo[26939]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yxngcsmbouppolsnchwedyvvmqwcqmcp ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338237.9288857-15948-265909207589811/AnsiballZ_systemd.py' Jul 12 12:37:18 managed-node2 sudo[26939]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:37:18 managed-node2 platform-python[26942]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Jul 12 12:37:18 managed-node2 systemd[25539]: Reloading. 
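The rootless httpd1 deployment above (podman play kube run as podman_basic_user, rc 0) corresponds roughly to a containers.podman.podman_play task executed under become. The sketch below is illustrative, not the role's actual source: the task name and become wiring are assumptions, while the kube_file path and module options are taken from the logged invocation:

    - name: Start httpd1 with podman play kube (rootless)   # illustrative name
      containers.podman.podman_play:
        kube_file: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
        state: started
        log_level: debug
        debug: true
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001   # runtime dir shown in the sudo lines above

The "Failed to add conmon to cgroupfs sandbox cgroup ... permission denied" and oom_score_adj messages above are expected for rootless podman under the cgroupfs manager and do not fail the play.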
Jul 12 12:37:18 managed-node2 sudo[26939]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:37:18 managed-node2 sudo[27076]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jxciagckwyaiwverlxxxicxpjcaamzpb ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338238.5066783-15974-208582760237043/AnsiballZ_systemd.py' Jul 12 12:37:18 managed-node2 sudo[27076]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:37:18 managed-node2 dnsmasq[26762]: listening on cni-podman1(#3): fe80::8fa:9aff:fe36:b83a%cni-podman1 Jul 12 12:37:18 managed-node2 platform-python[27079]: ansible-systemd Invoked with name= scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None Jul 12 12:37:18 managed-node2 systemd[25539]: Reloading. Jul 12 12:37:18 managed-node2 sudo[27076]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:37:19 managed-node2 sudo[27215]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aeingsotugnwsviddfzcxglibrontkpg ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338239.1567352-16007-20750914035253/AnsiballZ_systemd.py' Jul 12 12:37:19 managed-node2 sudo[27215]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:37:19 managed-node2 platform-python[27218]: ansible-systemd Invoked with name= scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None Jul 12 12:37:19 managed-node2 systemd[25539]: Created slice podman\x2dkube.slice. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:37:19 managed-node2 systemd[25539]: Starting A template for running K8s workloads via podman-kube-play... -- Subject: Unit UNIT has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun starting up. 
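The three ansible-systemd invocations above first reload the user-scope systemd manager, then enable and start the generated kube-play unit for podman_basic_user. The unit name is blank in the logged parameters, so the name below is a placeholder; the description "A template for running K8s workloads via podman-kube-play" suggests podman's podman-kube@.service template. A sketch:

    - name: Reload the user systemd manager          # illustrative
      ansible.builtin.systemd:
        daemon_reload: true
        scope: user
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001

    - name: Enable and start the kube-play unit      # illustrative
      ansible.builtin.systemd:
        name: podman-kube@PLACEHOLDER.service        # real name not shown in this log
        scope: user
        enabled: true
        state: started
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001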
Jul 12 12:37:19 managed-node2 conmon[26798]: conmon 8b812a2ec55f9de0cde0 : container 26808 exited with status 137 Jul 12 12:37:19 managed-node2 conmon[26777]: conmon db962b9f1559ffd15c96 : container 26787 exited with status 137 Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057)" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=info msg="Using sqlite as database backend" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70)" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=info msg="Using sqlite as database backend" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using graph driver overlay" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using run root /run/user/3001/containers" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using transient store: false" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: 
time="2025-07-12T12:37:19-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Cached value indicated that metacopy is not being used" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Cached value indicated that native-diff is usable" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Initializing event backend file" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=info msg="Setting parallel job count to 7" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using graph driver overlay" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using run root /run/user/3001/containers" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jul 12 12:37:19 
managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using transient store: false" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Cached value indicated that metacopy is not being used" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Cached value indicated that native-diff is usable" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Initializing event backend file" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=info msg="Setting parallel job count to 7" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman 
--root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057)" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time="2025-07-12T12:37:19-04:00" level=debug msg="Shutting down engines" Jul 12 12:37:19 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered disabled state Jul 12 12:37:19 managed-node2 kernel: device veth14aad36c left promiscuous mode Jul 12 12:37:19 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered disabled state Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70)" Jul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time="2025-07-12T12:37:19-04:00" level=debug msg="Shutting down engines" Jul 12 12:37:19 managed-node2 podman[27224]: Pods stopped: Jul 12 12:37:19 managed-node2 podman[27224]: 49a038584fa170b91928655aabc3334b950d706880329a96a99914f26bc7c59d Jul 12 12:37:19 managed-node2 podman[27224]: Pods removed: Jul 12 12:37:19 managed-node2 podman[27224]: 49a038584fa170b91928655aabc3334b950d706880329a96a99914f26bc7c59d Jul 12 12:37:19 managed-node2 podman[27224]: Secrets removed: Jul 12 12:37:19 managed-node2 podman[27224]: Volumes removed: Jul 12 12:37:20 managed-node2 systemd[25539]: Started rootless-netns-910042d3.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
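Exit status 137 above is 128+9, i.e. the containers were SIGKILLed: the freshly started unit appears to tear down the pod created by the earlier ad-hoc play kube run (pod 49a038584fa1..., whose infra container was named 49a038584fa1-infra above) before recreating it. On each container's exit, conmon runs the --exit-command recorded at creation time, which is podman container cleanup with the same global flags. Expressed as an ad-hoc task purely for illustration:

    - name: What conmon's --exit-command amounts to   # illustrative only
      ansible.builtin.command:
        argv:
          - /usr/bin/podman
          - --root
          - /home/podman_basic_user/.local/share/containers/storage
          - --runroot
          - /run/user/3001/containers
          - container
          - cleanup
          - 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057
      become: true
      become_user: podman_basic_user

The cleanup processes (PIDs 27244 and 27252 above) unmount the containers' overlay filesystems and tear down the network namespace, which is why the kernel logs veth14aad36c leaving the bridge.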
Jul 12 12:37:20 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth73ffc199: link is not ready Jul 12 12:37:20 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered blocking state Jul 12 12:37:20 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered disabled state Jul 12 12:37:20 managed-node2 kernel: device veth73ffc199 entered promiscuous mode Jul 12 12:37:20 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered blocking state Jul 12 12:37:20 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered forwarding state Jul 12 12:37:20 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth73ffc199: link becomes ready Jul 12 12:37:20 managed-node2 dnsmasq[27470]: listening on cni-podman1(#3): 10.89.0.1 Jul 12 12:37:20 managed-node2 dnsmasq[27472]: started, version 2.79 cachesize 150 Jul 12 12:37:20 managed-node2 dnsmasq[27472]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify Jul 12 12:37:20 managed-node2 dnsmasq[27472]: using local addresses only for domain dns.podman Jul 12 12:37:20 managed-node2 dnsmasq[27472]: reading /etc/resolv.conf Jul 12 12:37:20 managed-node2 dnsmasq[27472]: using local addresses only for domain dns.podman Jul 12 12:37:20 managed-node2 dnsmasq[27472]: using nameserver 10.0.2.3#53 Jul 12 12:37:20 managed-node2 dnsmasq[27472]: using nameserver 10.29.169.13#53 Jul 12 12:37:20 managed-node2 dnsmasq[27472]: using nameserver 10.29.170.12#53 Jul 12 12:37:20 managed-node2 dnsmasq[27472]: using nameserver 10.2.32.1#53 Jul 12 12:37:20 managed-node2 dnsmasq[27472]: read /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Jul 12 12:37:20 managed-node2 podman[27224]: Pod: Jul 12 12:37:20 managed-node2 podman[27224]: e6000b000d16f866b55a1a19dd3fb3262c82c732a816136db57b7e8368f0a32a Jul 12 12:37:20 managed-node2 podman[27224]: Container: Jul 12 12:37:20 managed-node2 podman[27224]: 3e84611729acf9a795f4d6223da39f911f01d8e5bb78d05b15144b66878ad807 Jul 12 12:37:20 managed-node2 systemd[25539]: Started A template for running K8s workloads via podman-kube-play. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
Jul 12 12:37:20 managed-node2 sudo[27215]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:37:21 managed-node2 platform-python[27649]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 12 12:37:21 managed-node2 platform-python[27773]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:37:22 managed-node2 dnsmasq[27472]: listening on cni-podman1(#3): fe80::c95:b4ff:fe67:d35c%cni-podman1 Jul 12 12:37:23 managed-node2 platform-python[27898]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:37:24 managed-node2 platform-python[28022]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:37:24 managed-node2 platform-python[28145]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:37:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 12 12:37:25 managed-node2 platform-python[28435]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:37:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
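With the rootless scenario up, the test moves on to a rootful deployment of httpd2: a getent lookup for root, a stat of /usr/bin/getsubids (likely used to decide how subuid/subgid ranges are queried), then host directories for the test content. The two ansible-file invocations map to something like the following sketch (task name and loop form are assumptions; paths and ownership come from the log):

    - name: Create host directories for httpd2 test content   # illustrative name
      ansible.builtin.file:
        path: "{{ item }}"
        state: directory
        owner: root
        group: root
      loop:
        - /tmp/lsr__qviri30_podman/httpd2-create
        - /tmp/lsr__qviri30_podman/httpd2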
Jul 12 12:37:26 managed-node2 platform-python[28558]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:37:26 managed-node2 platform-python[28681]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 12 12:37:27 managed-node2 platform-python[28780]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752338246.5734363-16367-230792965661198/source _original_basename=tmpcx3lufsl follow=False checksum=d1d2b75756121a76b51c55942528a638a8e19d00 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Jul 12 12:37:27 managed-node2 platform-python[28905]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 12 12:37:27 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice. -- Subject: Unit machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice has finished starting up. -- -- The start-up result is done. Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8210] manager: (cni-podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Jul 12 12:37:27 managed-node2 systemd-udevd[28952]: Using default interface naming scheme 'rhel-8.0'. Jul 12 12:37:27 managed-node2 systemd-udevd[28953]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. Jul 12 12:37:27 managed-node2 systemd-udevd[28953]: Could not generate persistent MAC address for vetha808c72b: No such file or directory Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8294] manager: (vetha808c72b): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Jul 12 12:37:27 managed-node2 systemd-udevd[28952]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. 
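The rootful deployment itself is three steps in the log above: ensure /etc/containers/ansible-kubernetes.d exists, copy the rendered httpd2.yml into it (checksum d1d2b757...), and run podman_play as root. As a sketch, with task names and the copy source being assumptions:

    - name: Ensure the kube spec directory exists       # illustrative
      ansible.builtin.file:
        path: /etc/containers/ansible-kubernetes.d
        state: directory
        owner: root
        group: "0"
        mode: "0755"

    - name: Install the httpd2 kube spec                # illustrative
      ansible.builtin.copy:
        src: httpd2.yml                                 # local source path is an assumption
        dest: /etc/containers/ansible-kubernetes.d/httpd2.yml
        owner: root
        group: "0"
        mode: "0644"

    - name: Start httpd2 with podman play kube (rootful)
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/httpd2.yml
        state: started
        log_level: debug
        debug: true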
Jul 12 12:37:27 managed-node2 systemd-udevd[28952]: Could not generate persistent MAC address for cni-podman1: No such file or directory Jul 12 12:37:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vetha808c72b: link is not ready Jul 12 12:37:27 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered blocking state Jul 12 12:37:27 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered disabled state Jul 12 12:37:27 managed-node2 kernel: device vetha808c72b entered promiscuous mode Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8417] device (cni-podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', sys-iface-state: 'external') Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8423] device (cni-podman1): state change: unavailable -> disconnected (reason 'connection-assumed', sys-iface-state: 'external') Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8433] device (cni-podman1): Activation: starting connection 'cni-podman1' (9399044c-ebcb-4319-aff1-7a172e94e2ea) Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8434] device (cni-podman1): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'external') Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8436] device (cni-podman1): state change: prepare -> config (reason 'none', sys-iface-state: 'external') Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8438] device (cni-podman1): state change: config -> ip-config (reason 'none', sys-iface-state: 'external') Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8440] device (cni-podman1): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'external') Jul 12 12:37:27 managed-node2 dbus-daemon[601]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.5' (uid=0 pid=666 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0") Jul 12 12:37:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready Jul 12 12:37:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready Jul 12 12:37:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vetha808c72b: link becomes ready Jul 12 12:37:27 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered blocking state Jul 12 12:37:27 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered forwarding state Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8634] device (vetha808c72b): carrier: link connected Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8637] device (cni-podman1): carrier: link connected Jul 12 12:37:27 managed-node2 systemd[1]: Starting Network Manager Script Dispatcher Service... -- Subject: Unit NetworkManager-dispatcher.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has begun starting up. Jul 12 12:37:27 managed-node2 dbus-daemon[601]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher' Jul 12 12:37:27 managed-node2 systemd[1]: Started Network Manager Script Dispatcher Service. -- Subject: Unit NetworkManager-dispatcher.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has finished starting up. -- -- The start-up result is done. 
Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.9275] device (cni-podman1): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'external') Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.9277] device (cni-podman1): state change: secondaries -> activated (reason 'none', sys-iface-state: 'external') Jul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.9282] device (cni-podman1): Activation: successful, device activated. Jul 12 12:37:28 managed-node2 dnsmasq[29076]: listening on cni-podman1(#3): 10.89.0.1 Jul 12 12:37:28 managed-node2 dnsmasq[29080]: started, version 2.79 cachesize 150 Jul 12 12:37:28 managed-node2 dnsmasq[29080]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify Jul 12 12:37:28 managed-node2 dnsmasq[29080]: using local addresses only for domain dns.podman Jul 12 12:37:28 managed-node2 dnsmasq[29080]: reading /etc/resolv.conf Jul 12 12:37:28 managed-node2 dnsmasq[29080]: using local addresses only for domain dns.podman Jul 12 12:37:28 managed-node2 dnsmasq[29080]: using nameserver 10.29.169.13#53 Jul 12 12:37:28 managed-node2 dnsmasq[29080]: using nameserver 10.29.170.12#53 Jul 12 12:37:28 managed-node2 dnsmasq[29080]: using nameserver 10.2.32.1#53 Jul 12 12:37:28 managed-node2 dnsmasq[29080]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Jul 12 12:37:28 managed-node2 systemd[1]: Started libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope. -- Subject: Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished starting up. -- -- The start-up result is done. Jul 12 12:37:28 managed-node2 conmon[29082]: conmon 2bba312ac73f9a5e5631 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Jul 12 12:37:28 managed-node2 conmon[29082]: conmon 2bba312ac73f9a5e5631 : terminal_ctrl_fd: 13 Jul 12 12:37:28 managed-node2 conmon[29082]: conmon 2bba312ac73f9a5e5631 : winsz read side: 17, winsz write side: 18 Jul 12 12:37:28 managed-node2 systemd[1]: Started libcontainer container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07. -- Subject: Unit libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished starting up. -- -- The start-up result is done. Jul 12 12:37:28 managed-node2 conmon[29082]: conmon 2bba312ac73f9a5e5631 : container PID: 29092 Jul 12 12:37:28 managed-node2 systemd[1]: Started libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope. -- Subject: Unit libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has finished starting up. -- -- The start-up result is done. 
Jul 12 12:37:28 managed-node2 conmon[29103]: conmon dde5d521943a5269ac5d : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach} Jul 12 12:37:28 managed-node2 conmon[29103]: conmon dde5d521943a5269ac5d : terminal_ctrl_fd: 12 Jul 12 12:37:28 managed-node2 conmon[29103]: conmon dde5d521943a5269ac5d : winsz read side: 16, winsz write side: 17 Jul 12 12:37:28 managed-node2 systemd[1]: Started libcontainer container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0. -- Subject: Unit libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has finished starting up. -- -- The start-up result is done. Jul 12 12:37:28 managed-node2 conmon[29103]: conmon dde5d521943a5269ac5d : container PID: 29114 Jul 12 12:37:28 managed-node2 platform-python[28905]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 12 12:37:28 managed-node2 platform-python[28905]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd Container: dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 Jul 12 12:37:28 managed-node2 platform-python[28905]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-12T12:37:27-04:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2025-07-12T12:37:27-04:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-07-12T12:37:27-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-07-12T12:37:27-04:00" level=info msg="Using sqlite as database backend" time="2025-07-12T12:37:27-04:00" level=debug msg="Using graph driver overlay" time="2025-07-12T12:37:27-04:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Using run root /run/containers/storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2025-07-12T12:37:27-04:00" level=debug msg="Using tmp dir /run/libpod" time="2025-07-12T12:37:27-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2025-07-12T12:37:27-04:00" level=debug msg="Using transient store: false" time="2025-07-12T12:37:27-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-12T12:37:27-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-12T12:37:27-04:00" level=debug msg="Cached value indicated that metacopy is being used" time="2025-07-12T12:37:27-04:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2025-07-12T12:37:27-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2025-07-12T12:37:27-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2025-07-12T12:37:27-04:00" level=debug msg="Initializing event backend file" time="2025-07-12T12:37:27-04:00" 
level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-07-12T12:37:27-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-07-12T12:37:27-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-07-12T12:37:27-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-07-12T12:37:27-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-07-12T12:37:27-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-07-12T12:37:27-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-07-12T12:37:27-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" time="2025-07-12T12:37:27-04:00" level=info msg="Setting parallel job count to 7" time="2025-07-12T12:37:27-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-12 12:34:58.774465298 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-07-12T12:37:27-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-07-12T12:37:27-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:27-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." 
time="2025-07-12T12:37:27-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2)" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Pod using bridge network mode" time="2025-07-12T12:37:27-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice for parent machine.slice and name libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd" time="2025-07-12T12:37:27-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice" time="2025-07-12T12:37:27-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice" time="2025-07-12T12:37:27-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:27-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." 
time="2025-07-12T12:37:27-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2)" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Inspecting image 4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Inspecting image 4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2" time="2025-07-12T12:37:27-04:00" level=debug msg="Inspecting image 4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2" time="2025-07-12T12:37:27-04:00" level=debug msg="using systemd mode: false" time="2025-07-12T12:37:27-04:00" level=debug msg="setting container name a247d85c3822-infra" time="2025-07-12T12:37:27-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Allocated lock 1 for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07" time="2025-07-12T12:37:27-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2025-07-12T12:37:27-04:00" level=debug msg="Check for idmapped mounts support " time="2025-07-12T12:37:27-04:00" level=debug msg="Created container \"2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Container \"2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\" has work directory \"/var/lib/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Container \"2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\" has run directory \"/run/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:27-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" 
..." time="2025-07-12T12:37:27-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-07-12T12:37:27-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:27-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-12T12:37:27-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:27-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-07-12T12:37:27-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:37:27-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-12T12:37:27-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-12T12:37:27-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-12T12:37:27-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-12T12:37:27-04:00" level=debug msg="using systemd mode: false" time="2025-07-12T12:37:27-04:00" level=debug msg="adding container to pod httpd2" time="2025-07-12T12:37:27-04:00" level=debug msg="setting container name httpd2-httpd2" 
time="2025-07-12T12:37:27-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-07-12T12:37:27-04:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-07-12T12:37:27-04:00" level=debug msg="Adding mount /proc" time="2025-07-12T12:37:27-04:00" level=debug msg="Adding mount /dev" time="2025-07-12T12:37:27-04:00" level=debug msg="Adding mount /dev/pts" time="2025-07-12T12:37:27-04:00" level=debug msg="Adding mount /dev/mqueue" time="2025-07-12T12:37:27-04:00" level=debug msg="Adding mount /sys" time="2025-07-12T12:37:27-04:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-07-12T12:37:27-04:00" level=debug msg="Allocated lock 2 for container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0" time="2025-07-12T12:37:27-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Created container \"dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Container \"dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\" has work directory \"/var/lib/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Container \"dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\" has run directory \"/run/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata\"" time="2025-07-12T12:37:27-04:00" level=debug msg="Strongconnecting node dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0" time="2025-07-12T12:37:27-04:00" level=debug msg="Pushed dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 onto stack" time="2025-07-12T12:37:27-04:00" level=debug msg="Recursing to successor node 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07" time="2025-07-12T12:37:27-04:00" level=debug msg="Strongconnecting node 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07" time="2025-07-12T12:37:27-04:00" level=debug msg="Pushed 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 onto stack" time="2025-07-12T12:37:27-04:00" level=debug msg="Finishing node 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07. Popped 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 off stack" time="2025-07-12T12:37:27-04:00" level=debug msg="Finishing node dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0. 
Popped dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 off stack"
time="2025-07-12T12:37:27-04:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/BPQ67IPF3U2MS7MKOAJ6EE5AVL,upperdir=/var/lib/containers/storage/overlay/110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0/diff,workdir=/var/lib/containers/storage/overlay/110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c20,c130\""
time="2025-07-12T12:37:27-04:00" level=debug msg="Mounted container \"2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\" at \"/var/lib/containers/storage/overlay/110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0/merged\""
time="2025-07-12T12:37:27-04:00" level=debug msg="Created root filesystem for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 at /var/lib/containers/storage/overlay/110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0/merged"
time="2025-07-12T12:37:27-04:00" level=debug msg="Made network namespace at /run/netns/netns-93660061-5819-4d54-dfec-784d954efe33 for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07"
time="2025-07-12T12:37:28-04:00" level=debug msg="cni result for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:7e:63:02:ee:ed:5c Sandbox:} {Name:vetha808c72b Mac:8a:e4:ca:d3:1c:60 Sandbox:} {Name:eth0 Mac:f2:ab:50:c0:43:48 Sandbox:/run/netns/netns-93660061-5819-4d54-dfec-784d954efe33}] [{Version:4 Interface:0xc0006632b8 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}"
time="2025-07-12T12:37:28-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription"
time="2025-07-12T12:37:28-04:00" level=debug msg="Setting Cgroups for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 to machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice:libpod:2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07"
time="2025-07-12T12:37:28-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d"
time="2025-07-12T12:37:28-04:00" level=debug msg="Workdir \"/\" resolved to host path \"/var/lib/containers/storage/overlay/110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0/merged\""
time="2025-07-12T12:37:28-04:00" level=debug msg="Created OCI spec for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 at /var/lib/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata/config.json"
time="2025-07-12T12:37:28-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice for parent machine.slice and name libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd"
time="2025-07-12T12:37:28-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice"
time="2025-07-12T12:37:28-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice"
time="2025-07-12T12:37:28-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog"
time="2025-07-12T12:37:28-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 -u 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata -p /run/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata/pidfile -n a247d85c3822-infra --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07]"
time="2025-07-12T12:37:28-04:00" level=info msg="Running conmon under slice machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice and unitName libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope"
time="2025-07-12T12:37:28-04:00" level=debug msg="Received: 29092"
time="2025-07-12T12:37:28-04:00" level=info msg="Got Conmon PID as 29082"
time="2025-07-12T12:37:28-04:00" level=debug msg="Created container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 in OCI runtime"
time="2025-07-12T12:37:28-04:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'"
time="2025-07-12T12:37:28-04:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'"
time="2025-07-12T12:37:28-04:00" level=debug msg="Starting container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 with command [/catatonit -P]"
time="2025-07-12T12:37:28-04:00" level=debug msg="Started container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07"
time="2025-07-12T12:37:28-04:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/UMCCOJYMJQIWGK7MOUSAJGNIT3,upperdir=/var/lib/containers/storage/overlay/4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72/diff,workdir=/var/lib/containers/storage/overlay/4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c20,c130\""
time="2025-07-12T12:37:28-04:00" level=debug msg="Mounted container \"dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\" at \"/var/lib/containers/storage/overlay/4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72/merged\""
time="2025-07-12T12:37:28-04:00" level=debug msg="Created root filesystem for container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 at /var/lib/containers/storage/overlay/4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72/merged"
time="2025-07-12T12:37:28-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription"
time="2025-07-12T12:37:28-04:00" level=debug msg="Setting Cgroups for container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 to machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice:libpod:dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0"
time="2025-07-12T12:37:28-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d"
time="2025-07-12T12:37:28-04:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount"
time="2025-07-12T12:37:28-04:00" level=debug msg="Created OCI spec for container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 at /var/lib/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata/config.json"
time="2025-07-12T12:37:28-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice for parent machine.slice and name libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd"
time="2025-07-12T12:37:28-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice"
time="2025-07-12T12:37:28-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice"
time="2025-07-12T12:37:28-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog"
time="2025-07-12T12:37:28-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 -u dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata -p /run/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0]"
time="2025-07-12T12:37:28-04:00" level=info msg="Running conmon under slice machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice and unitName libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope"
time="2025-07-12T12:37:28-04:00" level=debug msg="Received: 29114"
time="2025-07-12T12:37:28-04:00" level=info msg="Got Conmon PID as 29103"
time="2025-07-12T12:37:28-04:00" level=debug msg="Created container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 in OCI runtime"
time="2025-07-12T12:37:28-04:00" level=debug msg="Starting container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 with command [/bin/busybox-extras httpd -f -p 80]"
time="2025-07-12T12:37:28-04:00" level=debug msg="Started container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0"
time="2025-07-12T12:37:28-04:00" level=debug msg="Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)"
time="2025-07-12T12:37:28-04:00" level=debug msg="Shutting down engines"
Jul 12 12:37:28 managed-node2 platform-python[28905]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0
Jul 12 12:37:28 managed-node2 platform-python[29245]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Jul 12 12:37:28 managed-node2 systemd[1]: Reloading.
Jul 12 12:37:29 managed-node2 platform-python[29406]: ansible-systemd Invoked with name= scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None
Jul 12 12:37:29 managed-node2 systemd[1]: Reloading.
Jul 12 12:37:29 managed-node2 dnsmasq[29080]: listening on cni-podman1(#3): fe80::7c63:2ff:feee:ed5c%cni-podman1
Jul 12 12:37:30 managed-node2 platform-python[29569]: ansible-systemd Invoked with name= scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None
Jul 12 12:37:30 managed-node2 systemd[1]: Created slice system-podman\x2dkube.slice.
-- Subject: Unit system-podman\x2dkube.slice has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit system-podman\x2dkube.slice has finished starting up.
--
-- The start-up result is done.
Jul 12 12:37:30 managed-node2 systemd[1]: Starting A template for running K8s workloads via podman-kube-play...
-- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun starting up.
Jul 12 12:37:30 managed-node2 conmon[29082]: conmon 2bba312ac73f9a5e5631 : container 29092 exited with status 137
Jul 12 12:37:30 managed-node2 systemd[1]: libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has successfully entered the 'dead' state.
Jul 12 12:37:30 managed-node2 systemd[1]: libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope: Consumed 31ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope completed and consumed the indicated resources.
Jul 12 12:37:30 managed-node2 conmon[29103]: conmon dde5d521943a5269ac5d : container 29114 exited with status 137
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07)"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Setting custom database backend: \"sqlite\""
Jul 12 12:37:30 managed-node2 systemd[1]: libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has successfully entered the 'dead' state.
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\""
Jul 12 12:37:30 managed-node2 systemd[1]: libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope: Consumed 32ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope completed and consumed the indicated resources.
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=info msg="Using sqlite as database backend"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using graph driver overlay"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using graph root /var/lib/containers/storage"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using run root /run/containers/storage"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using tmp dir /run/libpod"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using transient store: false"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\""
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Cached value indicated that overlay is supported"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Cached value indicated that overlay is supported"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Cached value indicated that metacopy is being used"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Cached value indicated that native-diff is not being used"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Initializing event backend file"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\""
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=info msg="Setting parallel job count to 7"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0)"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Setting custom database backend: \"sqlite\""
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\""
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=info msg="Using sqlite as database backend"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using graph driver overlay"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using graph root /var/lib/containers/storage"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using run root /run/containers/storage"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using tmp dir /run/libpod"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using transient store: false"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\""
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Cached value indicated that overlay is supported"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Cached value indicated that overlay is supported"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Cached value indicated that metacopy is being used"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Cached value indicated that native-diff is not being used"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Initializing event backend file"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\""
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=info msg="Setting parallel job count to 7"
Jul 12 12:37:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay-4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72-merged.mount has successfully entered the 'dead' state.
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0)"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time="2025-07-12T12:37:30-04:00" level=debug msg="Shutting down engines"
Jul 12 12:37:30 managed-node2 systemd[1]: libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has successfully entered the 'dead' state.
Jul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered disabled state
Jul 12 12:37:30 managed-node2 kernel: device vetha808c72b left promiscuous mode
Jul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered disabled state
Jul 12 12:37:30 managed-node2 systemd[1]: run-netns-netns\x2d93660061\x2d5819\x2d4d54\x2ddfec\x2d784d954efe33.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit run-netns-netns\x2d93660061\x2d5819\x2d4d54\x2ddfec\x2d784d954efe33.mount has successfully entered the 'dead' state.
Jul 12 12:37:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07-userdata-shm.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay\x2dcontainers-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07-userdata-shm.mount has successfully entered the 'dead' state.
Jul 12 12:37:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay-110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0-merged.mount has successfully entered the 'dead' state.
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07)"
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=debug msg="Shutting down engines"
Jul 12 12:37:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 12 12:37:30 managed-node2 systemd[1]: Stopping libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope.
-- Subject: Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has begun shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has begun shutting down.
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=info msg="Received shutdown signal \"terminated\", terminating!" PID=29592
Jul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time="2025-07-12T12:37:30-04:00" level=info msg="Invoking shutdown handler \"libpod\"" PID=29592
Jul 12 12:37:30 managed-node2 systemd[1]: libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has successfully entered the 'dead' state.
Jul 12 12:37:30 managed-node2 systemd[1]: Stopped libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope.
-- Subject: Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished shutting down.
Jul 12 12:37:30 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice.
-- Subject: Unit machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice has finished shutting down.
Jul 12 12:37:30 managed-node2 systemd[1]: machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice: Consumed 212ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice completed and consumed the indicated resources.
Jul 12 12:37:30 managed-node2 podman[29576]: Pods stopped:
Jul 12 12:37:30 managed-node2 podman[29576]: a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd
Jul 12 12:37:30 managed-node2 podman[29576]: Pods removed:
Jul 12 12:37:30 managed-node2 podman[29576]: a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd
Jul 12 12:37:30 managed-node2 podman[29576]: Secrets removed:
Jul 12 12:37:30 managed-node2 podman[29576]: Volumes removed:
Jul 12 12:37:30 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice.
-- Subject: Unit machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice has finished starting up.
--
-- The start-up result is done.
Jul 12 12:37:30 managed-node2 systemd[1]: Started libcontainer container af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.
-- Subject: Unit libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope has finished starting up.
--
-- The start-up result is done.
Jul 12 12:37:30 managed-node2 systemd-udevd[29733]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Jul 12 12:37:30 managed-node2 systemd-udevd[29733]: Could not generate persistent MAC address for vethec9deee2: No such file or directory
Jul 12 12:37:30 managed-node2 NetworkManager[666]: [1752338250.7637] manager: (vethec9deee2): new Veth device (/org/freedesktop/NetworkManager/Devices/5)
Jul 12 12:37:30 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vethec9deee2: link is not ready
Jul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered blocking state
Jul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered disabled state
Jul 12 12:37:30 managed-node2 kernel: device vethec9deee2 entered promiscuous mode
Jul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered blocking state
Jul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered forwarding state
Jul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered disabled state
Jul 12 12:37:30 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethec9deee2: link becomes ready
Jul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered blocking state
Jul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered forwarding state
Jul 12 12:37:30 managed-node2 NetworkManager[666]: [1752338250.7897] device (vethec9deee2): carrier: link connected
Jul 12 12:37:30 managed-node2 NetworkManager[666]: [1752338250.7911] device (cni-podman1): carrier: link connected
Jul 12 12:37:30 managed-node2 dnsmasq[29803]: listening on cni-podman1(#3): 10.89.0.1
Jul 12 12:37:30 managed-node2 dnsmasq[29803]: listening on cni-podman1(#3): fe80::7c63:2ff:feee:ed5c%cni-podman1
Jul 12 12:37:30 managed-node2 dnsmasq[29808]: started, version 2.79 cachesize 150
Jul 12 12:37:30 managed-node2 dnsmasq[29808]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify
Jul 12 12:37:30 managed-node2 dnsmasq[29808]: using local addresses only for domain dns.podman
Jul 12 12:37:30 managed-node2 dnsmasq[29808]: reading /etc/resolv.conf
Jul 12 12:37:30 managed-node2 dnsmasq[29808]: using local addresses only for domain dns.podman
Jul 12 12:37:30 managed-node2 dnsmasq[29808]: using nameserver 10.29.169.13#53
Jul 12 12:37:30 managed-node2 dnsmasq[29808]: using nameserver 10.29.170.12#53
Jul 12 12:37:30 managed-node2 dnsmasq[29808]: using nameserver 10.2.32.1#53
Jul 12 12:37:30 managed-node2 dnsmasq[29808]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses
Jul 12 12:37:30 managed-node2 systemd[1]: Started libcontainer container 39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.
-- Subject: Unit libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope has finished starting up.
--
-- The start-up result is done.
Jul 12 12:37:31 managed-node2 systemd[1]: Started libcontainer container fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.
-- Subject: Unit libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope has finished starting up.
--
-- The start-up result is done.
Jul 12 12:37:31 managed-node2 podman[29576]: Pod:
Jul 12 12:37:31 managed-node2 podman[29576]: ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5
Jul 12 12:37:31 managed-node2 podman[29576]: Container:
Jul 12 12:37:31 managed-node2 podman[29576]: fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149
Jul 12 12:37:31 managed-node2 systemd[1]: Started A template for running K8s workloads via podman-kube-play.
-- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished starting up.
--
-- The start-up result is done.
Jul 12 12:37:31 managed-node2 platform-python[29974]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 12 12:37:33 managed-node2 platform-python[30107]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:37:34 managed-node2 platform-python[30231]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:37:34 managed-node2 platform-python[30354]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:37:36 managed-node2 platform-python[30643]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 12 12:37:36 managed-node2 platform-python[30766]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:37:37 managed-node2 platform-python[30889]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Jul 12 12:37:37 managed-node2 platform-python[30988]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752338256.766105-16794-202828239900759/source _original_basename=tmpvj89f27p follow=False checksum=92197531821af6a866eb3c8d736aa33d00262127 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None
Jul 12 12:37:37 managed-node2 platform-python[31113]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Jul 12 12:37:37 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice.
-- Subject: Unit machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice has finished starting up.
--
-- The start-up result is done.
Jul 12 12:37:38 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Jul 12 12:37:38 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vetha724e550: link is not ready
Jul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered blocking state
Jul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered disabled state
Jul 12 12:37:38 managed-node2 kernel: device vetha724e550 entered promiscuous mode
Jul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered blocking state
Jul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered forwarding state
Jul 12 12:37:38 managed-node2 NetworkManager[666]: [1752338258.0378] manager: (vetha724e550): new Veth device (/org/freedesktop/NetworkManager/Devices/6)
Jul 12 12:37:38 managed-node2 systemd-udevd[31161]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Jul 12 12:37:38 managed-node2 systemd-udevd[31161]: Could not generate persistent MAC address for vetha724e550: No such file or directory
Jul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered disabled state
Jul 12 12:37:38 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready
Jul 12 12:37:38 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready
Jul 12 12:37:38 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vetha724e550: link becomes ready
Jul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered blocking state
Jul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered forwarding state
Jul 12 12:37:38 managed-node2 NetworkManager[666]: [1752338258.0795] device (vetha724e550): carrier: link connected
Jul 12 12:37:38 managed-node2 dnsmasq[29808]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 2 addresses
Jul 12 12:37:38 managed-node2 systemd[1]: Started libpod-conmon-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope.
-- Subject: Unit libpod-conmon-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-conmon-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has finished starting up.
--
-- The start-up result is done.
Jul 12 12:37:38 managed-node2 systemd[1]: Started libcontainer container 8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.
-- Subject: Unit libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has finished starting up.
--
-- The start-up result is done.
Jul 12 12:37:38 managed-node2 systemd[1]: Started libpod-conmon-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope.
-- Subject: Unit libpod-conmon-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-conmon-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has finished starting up.
--
-- The start-up result is done.
Jul 12 12:37:38 managed-node2 systemd[1]: Started libcontainer container 239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.
-- Subject: Unit libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has finished starting up.
--
-- The start-up result is done.
Jul 12 12:37:38 managed-node2 platform-python[31394]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Jul 12 12:37:38 managed-node2 systemd[1]: Reloading.
Jul 12 12:37:39 managed-node2 platform-python[31555]: ansible-systemd Invoked with name= scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None
Jul 12 12:37:39 managed-node2 systemd[1]: Reloading.
Jul 12 12:37:40 managed-node2 platform-python[31710]: ansible-systemd Invoked with name= scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None
Jul 12 12:37:40 managed-node2 systemd[1]: Starting A template for running K8s workloads via podman-kube-play...
-- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun starting up.
Jul 12 12:37:40 managed-node2 systemd[1]: libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has successfully entered the 'dead' state.
Jul 12 12:37:40 managed-node2 systemd[1]: libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope: Consumed 31ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope completed and consumed the indicated resources.
Jul 12 12:37:40 managed-node2 systemd[1]: libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has successfully entered the 'dead' state.
Jul 12 12:37:40 managed-node2 systemd[1]: libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope: Consumed 32ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope completed and consumed the indicated resources.
Jul 12 12:37:40 managed-node2 dnsmasq[29808]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses
Jul 12 12:37:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay-719154c260667d3aa74578747f416c045e6c4537dd0a7c671adf4544cf226e68-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-719154c260667d3aa74578747f416c045e6c4537dd0a7c671adf4544cf226e68-merged.mount has successfully entered the 'dead' state.
Jul 12 12:37:40 managed-node2 systemd[1]: libpod-conmon-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-conmon-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has successfully entered the 'dead' state.
Jul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered disabled state
Jul 12 12:37:40 managed-node2 kernel: device vetha724e550 left promiscuous mode
Jul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered disabled state
Jul 12 12:37:40 managed-node2 systemd[1]: run-netns-netns\x2d1bb9153f\x2df22a\x2dcc5d\x2d3c7a\x2dd87e5ee733ce.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit run-netns-netns\x2d1bb9153f\x2df22a\x2dcc5d\x2d3c7a\x2dd87e5ee733ce.mount has successfully entered the 'dead' state.
Jul 12 12:37:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136-userdata-shm.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay\x2dcontainers-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136-userdata-shm.mount has successfully entered the 'dead' state.
Jul 12 12:37:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay-eb2787269c2e2cd7be423803b1667df0aa39556214229872d965cd9cab309419-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-eb2787269c2e2cd7be423803b1667df0aa39556214229872d965cd9cab309419-merged.mount has successfully entered the 'dead' state.
Jul 12 12:37:40 managed-node2 systemd[1]: libpod-conmon-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-conmon-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has successfully entered the 'dead' state.
Jul 12 12:37:40 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice.
-- Subject: Unit machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice has finished shutting down.
Jul 12 12:37:40 managed-node2 systemd[1]: machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice: Consumed 199ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice completed and consumed the indicated resources.
Jul 12 12:37:40 managed-node2 podman[31717]: Pods stopped:
Jul 12 12:37:40 managed-node2 podman[31717]: 537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583
Jul 12 12:37:40 managed-node2 podman[31717]: Pods removed:
Jul 12 12:37:40 managed-node2 podman[31717]: 537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583
Jul 12 12:37:40 managed-node2 podman[31717]: Secrets removed:
Jul 12 12:37:40 managed-node2 podman[31717]: Volumes removed:
Jul 12 12:37:40 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice.
-- Subject: Unit machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice has finished starting up.
--
-- The start-up result is done.
Jul 12 12:37:40 managed-node2 systemd[1]: Started libcontainer container 7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.
-- Subject: Unit libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope has finished starting up.
--
-- The start-up result is done.
Jul 12 12:37:40 managed-node2 NetworkManager[666]: [1752338260.9491] manager: (veth3fe74d71): new Veth device (/org/freedesktop/NetworkManager/Devices/7)
Jul 12 12:37:40 managed-node2 systemd-udevd[31882]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Jul 12 12:37:40 managed-node2 systemd-udevd[31882]: Could not generate persistent MAC address for veth3fe74d71: No such file or directory
Jul 12 12:37:40 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth3fe74d71: link is not ready
Jul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered blocking state
Jul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered disabled state
Jul 12 12:37:40 managed-node2 kernel: device veth3fe74d71 entered promiscuous mode
Jul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered blocking state
Jul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered forwarding state
Jul 12 12:37:40 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth3fe74d71: link becomes ready
Jul 12 12:37:40 managed-node2 NetworkManager[666]: [1752338260.9931] device (veth3fe74d71): carrier: link connected
Jul 12 12:37:41 managed-node2 dnsmasq[29808]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 2 addresses
Jul 12 12:37:41 managed-node2 systemd[1]: Started libcontainer container 304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.
-- Subject: Unit libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope has finished starting up.
--
-- The start-up result is done.
Jul 12 12:37:41 managed-node2 systemd[1]: Started libcontainer container e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.
-- Subject: Unit libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope has finished starting up.
--
-- The start-up result is done.
Jul 12 12:37:41 managed-node2 systemd[1]: Started A template for running K8s workloads via podman-kube-play.
-- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished starting up.
--
-- The start-up result is done.
Jul 12 12:37:41 managed-node2 podman[31717]: Pod:
Jul 12 12:37:41 managed-node2 podman[31717]: ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2
Jul 12 12:37:41 managed-node2 podman[31717]: Container:
Jul 12 12:37:41 managed-node2 podman[31717]: e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e
Jul 12 12:37:41 managed-node2 sudo[32116]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jyrknhzkjwtoyoqfhtaoymdanzpphasy ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338261.7921255-17011-231273247445257/AnsiballZ_command.py'
Jul 12 12:37:41 managed-node2 sudo[32116]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Jul 12 12:37:42 managed-node2 platform-python[32119]: ansible-command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:37:42 managed-node2 systemd[25539]: Started podman-32128.scope.
Jul 12 12:37:42 managed-node2 sudo[32116]: pam_unix(sudo:session): session closed for user podman_basic_user
Jul 12 12:37:42 managed-node2 platform-python[32258]: ansible-command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:37:42 managed-node2 platform-python[32389]: ansible-command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:37:43 managed-node2 sudo[32528]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lhqlhpwddcodyczhbsyjvspptskrqirm ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338263.154581-17086-94452808741655/AnsiballZ_command.py'
Jul 12 12:37:43 managed-node2 sudo[32528]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Jul 12 12:37:43 managed-node2 platform-python[32531]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:37:43 managed-node2 sudo[32528]: pam_unix(sudo:session): session closed for user podman_basic_user
Jul 12 12:37:43 managed-node2 platform-python[32657]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:37:44 managed-node2 platform-python[32783]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:37:44 managed-node2 platform-python[32909]: ansible-uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:37:45 managed-node2 platform-python[33033]: ansible-uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:37:45 managed-node2 rsyslogd[1025]: imjournal: journal files changed, reloading... [v8.2102.0-15.el8 try https://www.rsyslog.com/e/0 ]
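The podman pod inspect and ansible-uri entries above are the role test's verification pass: each pod's state is read with a Go-template format string and the published ports are probed over HTTP. A minimal sketch of tasks that would produce invocations like these; the commands and uri arguments are taken from the log, while the task names and loop values are assumptions:

  - name: Check that the rootless httpd1 pod is running
    command: !unsafe podman pod inspect httpd1 --format '{{.State}}'
    become: true
    become_user: podman_basic_user
    changed_when: false

  - name: Fetch the file each pod serves
    uri:
      url: "http://localhost:{{ item }}/index.txt"
      return_content: true
      status_code: 200
    loop: [15001, 15002]

The !unsafe tag keeps Ansible from treating podman's {{.State}} template as Jinja2.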
Jul 12 12:37:45 managed-node2 platform-python[33158]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr__qviri30_podman/httpd1-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:37:45 managed-node2 platform-python[33282]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr__qviri30_podman/httpd2-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:37:46 managed-node2 platform-python[33406]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr__qviri30_podman/httpd3-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:37:49 managed-node2 platform-python[33655]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:37:50 managed-node2 platform-python[33784]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 12 12:37:53 managed-node2 platform-python[33909]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Jul 12 12:37:56 managed-node2 platform-python[34032]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None
Jul 12 12:37:56 managed-node2 platform-python[34159]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None
Jul 12 12:37:57 managed-node2 platform-python[34286]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Jul 12 12:37:59 managed-node2 platform-python[34409]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Jul 12 12:38:02 managed-node2 platform-python[34532]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Jul 12 12:38:05 managed-node2 platform-python[34655]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Jul 12 12:38:08 managed-node2 platform-python[34778]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Jul 12 12:38:10 managed-node2 platform-python[34939]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True
Jul 12 12:38:10 managed-node2 platform-python[35062]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked
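The firewall_lib and local_seport invocations above open 15001-15003/tcp and label those ports http_port_t before any pod publishes them. A sketch of the role variables that plausibly drive this; the variable names follow the linux-system-roles podman documentation and are an assumption here, only the values come from the log:

  podman_firewall:
    - port: 15001-15003/tcp
      state: enabled
  podman_selinux_ports:
    - ports: 15001-15003
      proto: tcp
      setype: http_port_t
      state: present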
Jul 12 12:38:15 managed-node2 platform-python[35185]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None
Jul 12 12:38:15 managed-node2 platform-python[35309]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 12 12:38:16 managed-node2 platform-python[35434]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:38:16 managed-node2 platform-python[35558]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:38:17 managed-node2 platform-python[35682]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:38:18 managed-node2 platform-python[35806]: ansible-command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None
Jul 12 12:38:19 managed-node2 platform-python[35929]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:38:19 managed-node2 platform-python[36052]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:38:19 managed-node2 sudo[36175]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sfjnrnyknupgcycrjkhhnhuswecfqpyf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338299.776674-18742-47644857358508/AnsiballZ_podman_image.py'
Jul 12 12:38:19 managed-node2 sudo[36175]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Jul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36180.scope.
Jul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36189.scope.
Jul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36197.scope.
Jul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36205.scope.
Jul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36213.scope.
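The loginctl enable-linger entry above is the standard idempotent pattern for rootless podman: creates= points at the flag file loginctl writes, so the command is skipped once lingering is already on and the user's systemd instance survives logout. Reconstructed as a task directly from the logged invocation:

  - name: Enable lingering for the rootless user
    command: loginctl enable-linger podman_basic_user
    args:
      creates: /var/lib/systemd/linger/podman_basic_user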
Jul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36222.scope.
Jul 12 12:38:20 managed-node2 sudo[36175]: pam_unix(sudo:session): session closed for user podman_basic_user
Jul 12 12:38:21 managed-node2 platform-python[36351]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 12 12:38:21 managed-node2 platform-python[36476]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:38:22 managed-node2 platform-python[36599]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Jul 12 12:38:22 managed-node2 platform-python[36663]: ansible-file Invoked with owner=podman_basic_user group=3001 mode=0644 dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _original_basename=tmpxhmslwri recurse=False state=file path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:38:22 managed-node2 sudo[36786]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kddzobyvwijhudrubugwpxpljmgfafhb ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338302.4767652-18857-261073031296101/AnsiballZ_podman_play.py'
Jul 12 12:38:22 managed-node2 sudo[36786]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Jul 12 12:38:22 managed-node2 platform-python[36789]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Jul 12 12:38:22 managed-node2 systemd[25539]: Started podman-36797.scope.
Jul 12 12:38:22 managed-node2 platform-python[36789]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
Jul 12 12:38:22 managed-node2 platform-python[36789]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout:
Jul 12 12:38:22 managed-node2 platform-python[36789]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-12T12:38:22-04:00" level=info msg="/bin/podman filtering at log level debug" time="2025-07-12T12:38:22-04:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-07-12T12:38:22-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-07-12T12:38:22-04:00" level=info msg="Using sqlite as database backend" time="2025-07-12T12:38:22-04:00" level=debug msg="Using graph driver overlay" time="2025-07-12T12:38:22-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2025-07-12T12:38:22-04:00" level=debug msg="Using run root /run/user/3001/containers" time="2025-07-12T12:38:22-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2025-07-12T12:38:22-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2025-07-12T12:38:22-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2025-07-12T12:38:22-04:00" level=debug msg="Using transient store: false" time="2025-07-12T12:38:22-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-07-12T12:38:22-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-12T12:38:22-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-12T12:38:22-04:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2025-07-12T12:38:22-04:00" level=debug msg="Cached value indicated that native-diff is usable" time="2025-07-12T12:38:22-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2025-07-12T12:38:22-04:00" level=debug msg="Initializing event backend file" time="2025-07-12T12:38:22-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-07-12T12:38:22-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-07-12T12:38:22-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-07-12T12:38:22-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-07-12T12:38:22-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-07-12T12:38:22-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-07-12T12:38:22-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-07-12T12:38:22-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" time="2025-07-12T12:38:22-04:00" level=info msg="Setting parallel job count to 7" time="2025-07-12T12:38:22-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-12 12:37:16.97600692 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-07-12T12:38:22-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-07-12T12:38:22-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:38:22-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:38:22-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-07-12T12:38:22-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"" time="2025-07-12T12:38:22-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:38:22-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566)" time="2025-07-12T12:38:22-04:00" level=debug msg="exporting opaque data as blob \"sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"" time="2025-07-12T12:38:22-04:00" level=debug msg="Pod using bridge network mode" time="2025-07-12T12:38:22-04:00" level=debug msg="Got pod cgroup as /libpod_parent/36ca61264e7e11a7ce277e40b51ec55a9afdcde0d1c0d8549c5c14e962eb5314" Error: adding pod to state: name "httpd1" is in use: pod already exists time="2025-07-12T12:38:22-04:00" level=debug msg="Shutting down engines"
Jul 12 12:38:22 managed-node2 platform-python[36789]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125
Jul 12 12:38:22 managed-node2 sudo[36786]: pam_unix(sudo:session): session closed for user podman_basic_user
Jul 12 12:38:23 managed-node2 platform-python[36952]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 12 12:38:24 managed-node2 platform-python[37076]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 12 12:38:25 managed-node2 platform-python[37201]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
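The rc of 125 above is podman refusing to replay a kube file whose pod name is still registered ("name \"httpd1\" is in use: pod already exists"). A sketch of one way to make such a replay idempotent with containers.podman.podman_play; the recreate option is visible (unset) in the logged invocation, the rest of the task is an assumption:

  - name: Replay the kube file, replacing an existing pod of the same name
    containers.podman.podman_play:
      kube_file: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
      state: started
      recreate: true
    become: true
    become_user: podman_basic_user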
Jul 12 12:38:26 managed-node2 platform-python[37325]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:38:27 managed-node2 platform-python[37448]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:38:28 managed-node2 platform-python[37737]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 12 12:38:28 managed-node2 platform-python[37862]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:38:29 managed-node2 platform-python[37985]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Jul 12 12:38:29 managed-node2 platform-python[38049]: ansible-file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd2.yml _original_basename=tmpl5_fx80_ recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd2.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:38:29 managed-node2 platform-python[38172]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Jul 12 12:38:29 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice.
Jul 12 12:38:29 managed-node2 platform-python[38172]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml
Jul 12 12:38:29 managed-node2 platform-python[38172]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout:
Jul 12 12:38:29 managed-node2 platform-python[38172]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-12T12:38:29-04:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2025-07-12T12:38:29-04:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-07-12T12:38:29-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-07-12T12:38:29-04:00" level=info msg="Using sqlite as database backend" time="2025-07-12T12:38:29-04:00" level=debug msg="Using graph driver overlay" time="2025-07-12T12:38:29-04:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2025-07-12T12:38:29-04:00" level=debug msg="Using run root /run/containers/storage" time="2025-07-12T12:38:29-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2025-07-12T12:38:29-04:00" level=debug msg="Using tmp dir /run/libpod" time="2025-07-12T12:38:29-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2025-07-12T12:38:29-04:00" level=debug msg="Using transient store: false" time="2025-07-12T12:38:29-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-07-12T12:38:29-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-12T12:38:29-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-12T12:38:29-04:00" level=debug msg="Cached value indicated that metacopy is being used" time="2025-07-12T12:38:29-04:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2025-07-12T12:38:29-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2025-07-12T12:38:29-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2025-07-12T12:38:29-04:00" level=debug msg="Initializing event backend file" time="2025-07-12T12:38:29-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-07-12T12:38:29-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-07-12T12:38:29-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-07-12T12:38:29-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-07-12T12:38:29-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-07-12T12:38:29-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-07-12T12:38:29-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-07-12T12:38:29-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" time="2025-07-12T12:38:29-04:00" level=info msg="Setting parallel job count to 7" time="2025-07-12T12:38:29-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-12 12:34:58.774465298 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-07-12T12:38:29-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-07-12T12:38:29-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:38:29-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-12T12:38:29-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-07-12T12:38:29-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"" time="2025-07-12T12:38:29-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-12T12:38:29-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2)" time="2025-07-12T12:38:29-04:00" level=debug msg="exporting opaque data as blob \"sha256:4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"" time="2025-07-12T12:38:29-04:00" level=debug msg="Pod using bridge network mode" time="2025-07-12T12:38:29-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice for parent machine.slice and name libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312" time="2025-07-12T12:38:29-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice" time="2025-07-12T12:38:29-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice" Error: adding pod to state: name "httpd2" is in use: pod already exists time="2025-07-12T12:38:29-04:00" level=debug msg="Shutting down engines"
Jul 12 12:38:29 managed-node2 platform-python[38172]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125
Jul 12 12:38:31 managed-node2 platform-python[38333]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
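This debug trace and the earlier one for httpd1 differ only in where storage lives: the rootless run resolves its graph root under /home/podman_basic_user/.local/share/containers/storage with run root /run/user/3001/containers, while the rootful run uses /var/lib/containers/storage and /run/containers/storage. A sketch for surfacing both sets of paths; the podman info format fields are standard Go-template names, the task layout is an assumption:

  - name: Show rootless storage roots
    command: !unsafe podman info --format '{{.Store.GraphRoot}} {{.Store.RunRoot}}'
    become: true
    become_user: podman_basic_user
    changed_when: false

  - name: Show rootful storage roots
    command: !unsafe podman info --format '{{.Store.GraphRoot}} {{.Store.RunRoot}}'
    changed_when: false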
Jul 12 12:38:32 managed-node2 platform-python[38458]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:38:33 managed-node2 platform-python[38582]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:38:33 managed-node2 platform-python[38705]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:38:35 managed-node2 platform-python[38995]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 12 12:38:35 managed-node2 platform-python[39120]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:38:35 managed-node2 platform-python[39243]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Jul 12 12:38:36 managed-node2 platform-python[39307]: ansible-file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd3.yml _original_basename=tmpb1ttu3ws recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd3.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:38:36 managed-node2 platform-python[39430]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Jul 12 12:38:36 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_76dbf1d1588fd5e46783829b7ad9b35366a8e6eaea8987a62e2fa617ab2d64d4.slice.
Jul 12 12:38:37 managed-node2 sudo[39591]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-reodnpkicydeipvtrpezylgtxbcjdhgz ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338317.3985052-19629-279558676694792/AnsiballZ_command.py'
Jul 12 12:38:37 managed-node2 sudo[39591]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Jul 12 12:38:37 managed-node2 platform-python[39594]: ansible-command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:38:37 managed-node2 systemd[25539]: Started podman-39603.scope.
Jul 12 12:38:37 managed-node2 sudo[39591]: pam_unix(sudo:session): session closed for user podman_basic_user
Jul 12 12:38:38 managed-node2 platform-python[39733]: ansible-command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:38:38 managed-node2 platform-python[39864]: ansible-command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:38:38 managed-node2 sudo[39995]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jdxtrtiiowdglcaeyhyrkpgebggwzera ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338318.6450086-19659-70124315420202/AnsiballZ_command.py'
Jul 12 12:38:38 managed-node2 sudo[39995]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Jul 12 12:38:38 managed-node2 platform-python[39998]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:38:38 managed-node2 sudo[39995]: pam_unix(sudo:session): session closed for user podman_basic_user
Jul 12 12:38:39 managed-node2 platform-python[40124]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:38:39 managed-node2 platform-python[40250]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:38:40 managed-node2 platform-python[40376]: ansible-uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:38:40 managed-node2 platform-python[40500]: ansible-uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:38:40 managed-node2 platform-python[40624]: ansible-uri Invoked with url=http://localhost:15003/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:38:43 managed-node2 platform-python[40873]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:38:44 managed-node2 platform-python[41002]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 12 12:38:47 managed-node2 platform-python[41127]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None
Jul 12 12:38:48 managed-node2 platform-python[41251]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 12 12:38:48 managed-node2 platform-python[41376]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:38:49 managed-node2 platform-python[41500]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
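The getsubids entries above recheck that podman_basic_user has subordinate UID and GID ranges before the rootless teardown begins; without them rootless podman cannot map users inside containers. The two checks, reconstructed as tasks straight from the logged commands (only changed_when is an addition):

  - name: Verify a subuid range exists for the user
    command: getsubids podman_basic_user
    changed_when: false

  - name: Verify a subgid range exists for the user
    command: getsubids -g podman_basic_user
    changed_when: false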
Jul 12 12:38:50 managed-node2 platform-python[41624]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:38:50 managed-node2 platform-python[41748]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 12 12:38:51 managed-node2 sudo[41873]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-phdckzktiusimljvxxeqcswlbkptcgje ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338330.9997387-20292-117543446474536/AnsiballZ_systemd.py'
Jul 12 12:38:51 managed-node2 sudo[41873]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Jul 12 12:38:51 managed-node2 platform-python[41876]: ansible-systemd Invoked with name= scope=user state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None
Jul 12 12:38:51 managed-node2 systemd[25539]: Reloading.
Jul 12 12:38:51 managed-node2 systemd[25539]: Stopping A template for running K8s workloads via podman-kube-play...
Jul 12 12:38:51 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered disabled state
Jul 12 12:38:51 managed-node2 kernel: device veth73ffc199 left promiscuous mode
Jul 12 12:38:51 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered disabled state
Jul 12 12:38:51 managed-node2 podman[41892]: Pods stopped:
Jul 12 12:38:51 managed-node2 podman[41892]: e6000b000d16f866b55a1a19dd3fb3262c82c732a816136db57b7e8368f0a32a
Jul 12 12:38:51 managed-node2 podman[41892]: Pods removed:
Jul 12 12:38:51 managed-node2 podman[41892]: e6000b000d16f866b55a1a19dd3fb3262c82c732a816136db57b7e8368f0a32a
Jul 12 12:38:51 managed-node2 podman[41892]: Secrets removed:
Jul 12 12:38:51 managed-node2 podman[41892]: Volumes removed:
Jul 12 12:38:51 managed-node2 systemd[25539]: Stopped A template for running K8s workloads via podman-kube-play.
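In the user-scope stop above the unit name was not captured in the log (name= is blank). The instance name of a podman-kube@ unit is the systemd-escaped kube file path, so it can be derived instead of hand-escaped; a sketch under that assumption, with task names invented:

  - name: Compute the podman-kube@ unit name for the kube file
    command: systemd-escape --template=podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
    register: kube_unit
    changed_when: false

  - name: Stop and disable the rootless kube-play service
    systemd:
      name: "{{ kube_unit.stdout }}"
      scope: user
      state: stopped
      enabled: false
    become: true
    become_user: podman_basic_user
    environment:
      XDG_RUNTIME_DIR: /run/user/3001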
Jul 12 12:38:51 managed-node2 sudo[41873]: pam_unix(sudo:session): session closed for user podman_basic_user
Jul 12 12:38:52 managed-node2 platform-python[42165]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 12 12:38:52 managed-node2 sudo[42290]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nazeochktfswzfvlptenlckqnldzbmyv ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338332.4280062-20367-151061681885350/AnsiballZ_podman_play.py'
Jul 12 12:38:52 managed-node2 sudo[42290]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Jul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Jul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
Jul 12 12:38:52 managed-node2 systemd[25539]: Started podman-42301.scope.
Jul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman kube play --down /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
Jul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped: Pods removed: Secrets removed: Volumes removed:
Jul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr:
Jul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0
Jul 12 12:38:52 managed-node2 sudo[42290]: pam_unix(sudo:session): session closed for user podman_basic_user
Jul 12 12:38:53 managed-node2 platform-python[42430]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 12 12:38:54 managed-node2 platform-python[42553]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 12 12:38:54 managed-node2 platform-python[42677]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 12 12:38:56 managed-node2 platform-python[42802]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 12 12:38:56 managed-node2 platform-python[42926]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None
Jul 12 12:38:56 managed-node2 systemd[1]: Reloading.
Jul 12 12:38:57 managed-node2 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play...
-- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun shutting down.
Jul 12 12:38:57 managed-node2 systemd[1]: libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope: Succeeded.
Jul 12 12:38:57 managed-node2 systemd[1]: libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope: Consumed 32ms CPU time
Jul 12 12:38:57 managed-node2 systemd[1]: libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope: Succeeded.
Jul 12 12:38:57 managed-node2 systemd[1]: libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope: Consumed 34ms CPU time
Jul 12 12:38:57 managed-node2 dnsmasq[29808]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses
Jul 12 12:38:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay-ad05b883b876cb925ec05b9fafaf9a8a37fd48a25d5d54b9615f3f4cdf0bd3b3-merged.mount: Succeeded.
Jul 12 12:38:57 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered disabled state
Jul 12 12:38:57 managed-node2 kernel: device vethec9deee2 left promiscuous mode
Jul 12 12:38:57 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered disabled state
Jul 12 12:38:57 managed-node2 systemd[1]: run-netns-netns\x2d52414ca9\x2df342\x2dd1f3\x2d8cce\x2d232fb04744c1.mount: Succeeded.
Jul 12 12:38:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a-userdata-shm.mount: Succeeded.
Jul 12 12:38:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay-2d63d07bf8161ced4731534605fa38c1618204d50fc3a412c2eb303e296f3b5e-merged.mount: Succeeded.
Jul 12 12:38:57 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice.
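The state=absent invocation above is what produced the podman kube play --down call and its rc of 0, after which the role deletes the spec file. A minimal reconstruction of that pair for the rootless httpd1 pod, taken from the logged module arguments:

  - name: Tear down the rootless kube-play pod
    containers.podman.podman_play:
      kube_file: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
      state: absent
    become: true
    become_user: podman_basic_user

  - name: Remove the kube spec file
    file:
      path: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
      state: absent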
Jul 12 12:38:57 managed-node2 systemd[1]: machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice: Consumed 67ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice completed and consumed the indicated resources. Jul 12 12:38:57 managed-node2 podman[42962]: Pods stopped: Jul 12 12:38:57 managed-node2 podman[42962]: ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5 Jul 12 12:38:57 managed-node2 podman[42962]: Pods removed: Jul 12 12:38:57 managed-node2 podman[42962]: ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5 Jul 12 12:38:57 managed-node2 podman[42962]: Secrets removed: Jul 12 12:38:57 managed-node2 podman[42962]: Volumes removed: Jul 12 12:38:57 managed-node2 systemd[1]: libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope has successfully entered the 'dead' state. Jul 12 12:38:57 managed-node2 systemd[1]: libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope: Consumed 34ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope completed and consumed the indicated resources. Jul 12 12:38:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315-userdata-shm.mount has successfully entered the 'dead' state. Jul 12 12:38:57 managed-node2 dnsmasq[29808]: exiting on receipt of SIGTERM Jul 12 12:38:57 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has successfully entered the 'dead' state. Jul 12 12:38:57 managed-node2 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play. -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished shutting down. Jul 12 12:38:58 managed-node2 platform-python[43238]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:38:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay-495aa6291e9f835076198c3e1c7b8cf1909ca8b5400bdf0e5a851ba0c44119c1-merged.mount: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-495aa6291e9f835076198c3e1c7b8cf1909ca8b5400bdf0e5a851ba0c44119c1-merged.mount has successfully entered the 'dead' state. Jul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman kube play --down /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped: Pods removed: Secrets removed: Volumes removed: Jul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: Jul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jul 12 12:38:58 managed-node2 platform-python[43499]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:39:00 managed-node2 platform-python[43622]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:01 managed-node2 platform-python[43747]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:02 managed-node2 platform-python[43871]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 12 12:39:02 managed-node2 systemd[1]: Reloading. Jul 12 12:39:02 managed-node2 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play... -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun shutting down. Jul 12 12:39:02 managed-node2 systemd[1]: libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope has successfully entered the 'dead' state. Jul 12 12:39:02 managed-node2 systemd[1]: libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope: Consumed 33ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope completed and consumed the indicated resources. Jul 12 12:39:02 managed-node2 systemd[1]: libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope has successfully entered the 'dead' state. Jul 12 12:39:02 managed-node2 systemd[1]: libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope: Consumed 33ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope completed and consumed the indicated resources. Jul 12 12:39:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay-9f3109ed9592a16625c27d2daaac765746798fb973c8fcb3160951dbc3c83474-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-9f3109ed9592a16625c27d2daaac765746798fb973c8fcb3160951dbc3c83474-merged.mount has successfully entered the 'dead' state. Jul 12 12:39:02 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered disabled state Jul 12 12:39:02 managed-node2 kernel: device veth3fe74d71 left promiscuous mode Jul 12 12:39:02 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered disabled state Jul 12 12:39:02 managed-node2 systemd[1]: run-netns-netns\x2dda1f9efe\x2d2607\x2d2465\x2d3389\x2d63a80a061169.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-netns-netns\x2dda1f9efe\x2d2607\x2d2465\x2d3389\x2d63a80a061169.mount has successfully entered the 'dead' state. Jul 12 12:39:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70-userdata-shm.mount has successfully entered the 'dead' state. Jul 12 12:39:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay-b047a8f535e44a79e89943c24ecd0f40472ad6c74487b61c695a5612de0f66e9-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-b047a8f535e44a79e89943c24ecd0f40472ad6c74487b61c695a5612de0f66e9-merged.mount has successfully entered the 'dead' state. Jul 12 12:39:02 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice. 
-- Subject: Unit machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice has finished shutting down. Jul 12 12:39:02 managed-node2 systemd[1]: machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice: Consumed 66ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice completed and consumed the indicated resources. Jul 12 12:39:02 managed-node2 podman[43907]: Pods stopped: Jul 12 12:39:02 managed-node2 podman[43907]: ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2 Jul 12 12:39:02 managed-node2 podman[43907]: Pods removed: Jul 12 12:39:02 managed-node2 podman[43907]: ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2 Jul 12 12:39:02 managed-node2 podman[43907]: Secrets removed: Jul 12 12:39:02 managed-node2 podman[43907]: Volumes removed: Jul 12 12:39:02 managed-node2 systemd[1]: libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope has successfully entered the 'dead' state. Jul 12 12:39:02 managed-node2 systemd[1]: libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope: Consumed 36ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope completed and consumed the indicated resources. Jul 12 12:39:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b-userdata-shm.mount has successfully entered the 'dead' state. Jul 12 12:39:02 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has successfully entered the 'dead' state. Jul 12 12:39:02 managed-node2 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play. -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished shutting down. 
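[Editor's note] The teardown above is driven by the role stopping the per-file podman-kube template instance; the ansible-systemd entries in this log elide the name= value, but the unit name is visible in the surrounding journal messages. The instance name is the systemd-escaped kube file path (slashes mapped to dashes, literal dashes escaped as \x2d, the same escaping systemd-escape produces). A minimal sketch of the equivalent task, assuming the unit name copied from the messages above:

    - name: Stop and disable the podman-kube instance for httpd3.yml
      systemd:
        name: 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service'
        state: stopped
        enabled: false
        scope: system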
Jul 12 12:39:03 managed-node2 platform-python[44179]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay-a2ca6c0802e64aa881912046899069f9906c3a3ecb7fd7f0e60445f767b453fb-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-a2ca6c0802e64aa881912046899069f9906c3a3ecb7fd7f0e60445f767b453fb-merged.mount has successfully entered the 'dead' state. Jul 12 12:39:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 12 12:39:03 managed-node2 platform-python[44304]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 12 12:39:03 managed-node2 platform-python[44304]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/httpd3.yml Jul 12 12:39:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
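[Editor's note] The podman_play call above maps onto `podman kube play --down`, as the debug output for httpd2 earlier in the log shows (PODMAN-PLAY-KUBE command: /usr/bin/podman kube play --down ...). A minimal sketch of the task shape, using only parameters visible in the log:

    - name: Tear down the kube-play workload defined by httpd3.yml
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/httpd3.yml
        state: absent

With state=absent the module stops and removes the pod and its containers but leaves the YAML file on disk, which is why a separate file state=absent task follows next in the log.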
Jul 12 12:39:04 managed-node2 platform-python[44440]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:39:05 managed-node2 platform-python[44563]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None Jul 12 12:39:05 managed-node2 platform-python[44687]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:05 managed-node2 sudo[44812]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-efsiwiyrgguftoqfmdsvrczsjrcdxihg ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338345.768981-21015-270850189165831/AnsiballZ_podman_container_info.py' Jul 12 12:39:05 managed-node2 sudo[44812]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:39:06 managed-node2 platform-python[44815]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None Jul 12 12:39:06 managed-node2 systemd[25539]: Started podman-44817.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:39:06 managed-node2 sudo[44812]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:39:06 managed-node2 sudo[44946]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lgxpwgdjqpsoqirugaueifldgtghyuxf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338346.3029222-21038-175701710527734/AnsiballZ_command.py' Jul 12 12:39:06 managed-node2 sudo[44946]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:39:06 managed-node2 platform-python[44949]: ansible-command Invoked with _raw_params=podman network ls -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:06 managed-node2 systemd[25539]: Started podman-44951.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
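[Editor's note] The rootless checks above run under sudo as podman_basic_user with XDG_RUNTIME_DIR pointing at the user's runtime directory (/run/user/3001 in this log; in practice it is derived from the user's UID). A sketch of such a check under those assumptions:

    - name: List networks visible to the rootless user
      command: podman network ls -q
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001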
Jul 12 12:39:06 managed-node2 sudo[44946]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:39:06 managed-node2 sudo[45105]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-plsfjelikobxnwisunpzotpprpzjinoh ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338346.7950253-21068-8970032815672/AnsiballZ_command.py' Jul 12 12:39:06 managed-node2 sudo[45105]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:39:07 managed-node2 platform-python[45108]: ansible-command Invoked with _raw_params=podman secret ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:07 managed-node2 systemd[25539]: Started podman-45110.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:39:07 managed-node2 sudo[45105]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:39:07 managed-node2 platform-python[45239]: ansible-command Invoked with removes=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl disable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None Jul 12 12:39:07 managed-node2 systemd[1]: Stopping User Manager for UID 3001... -- Subject: Unit user@3001.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@3001.service has begun shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Stopping podman-pause-5a039c99.scope. -- Subject: Unit UNIT has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Stopped target Default. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Removed slice podman\x2dkube.slice. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Stopping D-Bus User Message Bus... -- Subject: Unit UNIT has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Stopped D-Bus User Message Bus. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Stopped target Basic System. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Stopped target Timers. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Stopped Mark boot as successful after the user session has run 2 minutes. 
-- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Stopped target Paths. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Stopped target Sockets. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Closed D-Bus User Message Bus Socket. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Stopped podman-pause-5a039c99.scope. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Removed slice user.slice. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[25539]: Reached target Shutdown. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:39:07 managed-node2 systemd[25539]: Started Exit the Session. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:39:07 managed-node2 systemd[25539]: Reached target Exit the Session. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 12 12:39:07 managed-node2 systemd[25545]: pam_unix(systemd-user:session): session closed for user podman_basic_user Jul 12 12:39:07 managed-node2 systemd[1]: user@3001.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit user@3001.service has successfully entered the 'dead' state. Jul 12 12:39:07 managed-node2 systemd[1]: Stopped User Manager for UID 3001. -- Subject: Unit user@3001.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@3001.service has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[1]: Stopping User runtime directory /run/user/3001... -- Subject: Unit user-runtime-dir@3001.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@3001.service has begun shutting down. Jul 12 12:39:07 managed-node2 systemd[1]: run-user-3001.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-user-3001.mount has successfully entered the 'dead' state. Jul 12 12:39:07 managed-node2 systemd[1]: user-runtime-dir@3001.service: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit user-runtime-dir@3001.service has successfully entered the 'dead' state. Jul 12 12:39:07 managed-node2 systemd[1]: Stopped User runtime directory /run/user/3001. -- Subject: Unit user-runtime-dir@3001.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@3001.service has finished shutting down. Jul 12 12:39:07 managed-node2 systemd[1]: Removed slice User Slice of UID 3001. -- Subject: Unit user-3001.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-3001.slice has finished shutting down. Jul 12 12:39:07 managed-node2 platform-python[45371]: ansible-command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:08 managed-node2 sudo[45495]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gazcsbdiijzpmpohefmybwwgcnpxuufr ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338348.4017277-21172-132611654922840/AnsiballZ_command.py' Jul 12 12:39:08 managed-node2 sudo[45495]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:39:08 managed-node2 platform-python[45498]: ansible-command Invoked with _raw_params=podman pod exists httpd1 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:08 managed-node2 sudo[45495]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:39:09 managed-node2 platform-python[45628]: ansible-command Invoked with _raw_params=podman pod exists httpd2 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:09 managed-node2 platform-python[45758]: ansible-command Invoked with _raw_params=podman pod exists httpd3 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:09 managed-node2 sudo[45888]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pizwdchaqbkhharmotzkhmtxjzrasqsn ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338349.5796022-21223-271577239366846/AnsiballZ_command.py' Jul 12 12:39:09 managed-node2 sudo[45888]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 12 12:39:09 managed-node2 platform-python[45891]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:09 managed-node2 sudo[45888]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 12 12:39:10 managed-node2 platform-python[46017]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:10 managed-node2 platform-python[46143]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True 
strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:10 managed-node2 platform-python[46269]: ansible-stat Invoked with path=/var/lib/systemd/linger/podman_basic_user follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:13 managed-node2 platform-python[46517]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:14 managed-node2 platform-python[46646]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 12 12:39:15 managed-node2 platform-python[46770]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:17 managed-node2 platform-python[46895]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jul 12 12:39:18 managed-node2 platform-python[47019]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:18 managed-node2 platform-python[47144]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:19 managed-node2 platform-python[47268]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:20 managed-node2 platform-python[47392]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:20 managed-node2 platform-python[47516]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:21 managed-node2 platform-python[47639]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:21 managed-node2 platform-python[47762]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:39:23 managed-node2 platform-python[47885]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 12 12:39:23 managed-node2 platform-python[48009]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:24 managed-node2 
platform-python[48134]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:25 managed-node2 platform-python[48258]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 12 12:39:26 managed-node2 platform-python[48385]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:26 managed-node2 platform-python[48508]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:39:27 managed-node2 platform-python[48631]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:29 managed-node2 platform-python[48756]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:29 managed-node2 platform-python[48880]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 12 12:39:30 managed-node2 platform-python[49007]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:30 managed-node2 platform-python[49130]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:39:31 managed-node2 platform-python[49253]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None Jul 12 12:39:32 managed-node2 platform-python[49377]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:32 managed-node2 platform-python[49500]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None 
remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:39:33 managed-node2 platform-python[49623]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:39:36 managed-node2 platform-python[49785]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d Jul 12 12:39:36 managed-node2 platform-python[49912]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:36 managed-node2 platform-python[50035]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:39 managed-node2 platform-python[50283]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:40 managed-node2 platform-python[50412]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 12 12:39:41 managed-node2 platform-python[50536]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:43 managed-node2 platform-python[50700]: ansible-setup Invoked with gather_subset=['all'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d Jul 12 12:39:46 managed-node2 platform-python[50852]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:47 managed-node2 platform-python[50975]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:49 managed-node2 platform-python[51223]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:50 managed-node2 platform-python[51352]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 12 12:39:50 managed-node2 platform-python[51476]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:56 managed-node2 platform-python[51640]: ansible-setup Invoked with gather_subset=['all'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d Jul 12 12:39:57 managed-node2 platform-python[51792]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:39:57 managed-node2 platform-python[51915]: ansible-command Invoked with 
_raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:39:58 managed-node2 platform-python[52039]: ansible-dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:40:02 managed-node2 platform-python[52167]: ansible-dnf Invoked with name=['certmonger'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:40:04 managed-node2 dbus-daemon[601]: [system] Reloaded configuration Jul 12 12:40:04 managed-node2 dbus-daemon[601]: [system] Reloaded configuration Jul 12 12:40:04 managed-node2 dbus-daemon[601]: [system] Reloaded configuration Jul 12 12:40:04 managed-node2 dbus-daemon[601]: [system] Reloaded configuration Jul 12 12:40:04 managed-node2 dbus-daemon[601]: [system] Reloaded configuration Jul 12 12:40:04 managed-node2 systemd[1]: Reloading. Jul 12 12:40:05 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. -- Subject: Unit run-rbd3345bfad0b449fb2e69833e5ca39b9.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit run-rbd3345bfad0b449fb2e69833e5ca39b9.service has finished starting up. -- -- The start-up result is done. Jul 12 12:40:05 managed-node2 systemd[1]: Starting man-db-cache-update.service... -- Subject: Unit man-db-cache-update.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has begun starting up. Jul 12 12:40:05 managed-node2 systemd[1]: Reloading. Jul 12 12:40:05 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit man-db-cache-update.service has successfully entered the 'dead' state. Jul 12 12:40:05 managed-node2 systemd[1]: Started man-db-cache-update.service. -- Subject: Unit man-db-cache-update.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has finished starting up. -- -- The start-up result is done. Jul 12 12:40:05 managed-node2 systemd[1]: run-rbd3345bfad0b449fb2e69833e5ca39b9.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-rbd3345bfad0b449fb2e69833e5ca39b9.service has successfully entered the 'dead' state. 
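[Editor's note] The certificate phase begins by installing certmonger and the python libraries the certificate role depends on; the log shows this as two separate dnf transactions (the python3-* packages, then certmonger), combined here into one illustrative sketch:

    - name: Install certmonger and its python dependencies
      dnf:
        name:
          - python3-pyasn1
          - python3-cryptography
          - python3-dbus
          - certmonger
        state: present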
Jul 12 12:40:06 managed-node2 platform-python[52799]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:40:06 managed-node2 platform-python[52922]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:40:07 managed-node2 platform-python[53045]: ansible-systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 12 12:40:07 managed-node2 systemd[1]: Reloading. Jul 12 12:40:07 managed-node2 systemd[1]: Starting Certificate monitoring and PKI enrollment... -- Subject: Unit certmonger.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit certmonger.service has begun starting up. Jul 12 12:40:07 managed-node2 systemd[1]: Started Certificate monitoring and PKI enrollment. -- Subject: Unit certmonger.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit certmonger.service has finished starting up. -- -- The start-up result is done. 
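[Editor's note] With certmonger running, the certificate_request invocation that follows (name=quadlet_demo, dns=['localhost'], ca=self-sign, provider=certmonger) corresponds roughly to this use of the certificate role; certificate_requests is the role's documented interface, and the task shape here is a sketch reconstructed from the logged parameters, not the test's literal source:

    - name: Issue a self-signed certificate for the quadlet demo
      include_role:
        name: fedora.linux_system_roles.certificate
      vars:
        certificate_requests:
          - name: quadlet_demo
            dns: ['localhost']
            ca: self-sign

The repeated "Wrote to /var/lib/certmonger/requests/..." entries below are certmonger persisting the tracking state of the request while it is being processed and signed.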
Jul 12 12:40:08 managed-node2 platform-python[53238]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=# # Ansible managed # # system_role:certificate booted=True provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None Jul 12 12:40:08 managed-node2 certmonger[53081]: 2025-07-12 12:40:08 [53081] Wrote to /var/lib/certmonger/requests/20250712164008 (message repeated 27 more times) Jul 12 12:40:08 managed-node2 certmonger[53254]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved. Jul 12 12:40:08 managed-node2 certmonger[53081]: 2025-07-12 12:40:08 [53081] Wrote to /var/lib/certmonger/requests/20250712164008 Jul 12 12:40:08 managed-node2 platform-python[53376]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Jul 12 12:40:09 managed-node2 platform-python[53499]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key Jul 12 12:40:09 managed-node2 platform-python[53622]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Jul 12 12:40:10 managed-node2 platform-python[53745]: ansible-command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:40:10 managed-node2 certmonger[53081]: 2025-07-12 12:40:10 [53081] Wrote to /var/lib/certmonger/requests/20250712164008 Jul 12 12:40:10 managed-node2 platform-python[53869]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:40:10 managed-node2 platform-python[53992]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:40:11 managed-node2 platform-python[54115]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER
backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 12 12:40:11 managed-node2 platform-python[54238]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:40:12 managed-node2 platform-python[54361]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:40:14 managed-node2 platform-python[54609]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:40:15 managed-node2 platform-python[54738]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 12 12:40:16 managed-node2 platform-python[54862]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:40:17 managed-node2 platform-python[54987]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:40:18 managed-node2 platform-python[55110]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:40:18 managed-node2 platform-python[55233]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:40:19 managed-node2 platform-python[55357]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:40:22 managed-node2 platform-python[55480]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 12 12:40:22 managed-node2 platform-python[55607]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 12 12:40:23 managed-node2 platform-python[55734]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 12 12:40:24 managed-node2 platform-python[55857]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True 
service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 12 12:40:26 managed-node2 platform-python[55980]: ansible-command Invoked with _raw_params=journalctl -ex warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:40:26 managed-node2 platform-python[56104]: ansible-command Invoked with _raw_params=podman ps -a warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:40:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck3019378336-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-metacopy\x2dcheck3019378336-merged.mount has successfully entered the 'dead' state. Jul 12 12:40:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 12 12:40:27 managed-node2 platform-python[56234]: ansible-command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:40:27 managed-node2 platform-python[56364]: ansible-command Invoked with _raw_params=set -euo pipefail; systemctl list-units --all | grep quadlet _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:40:28 managed-node2 platform-python[56490]: ansible-command Invoked with _raw_params=ls -alrtF /etc/systemd/system warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:40:30 managed-node2 platform-python[56739]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:40:31 managed-node2 platform-python[56868]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 12 12:40:33 managed-node2 platform-python[56993]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 12 12:40:36 managed-node2 platform-python[57116]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False 
state=None enabled=None force=None user=None scope=None Jul 12 12:40:36 managed-node2 platform-python[57243]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 12 12:40:37 managed-node2 platform-python[57370]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 12 12:40:38 managed-node2 platform-python[57493]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 12 12:40:39 managed-node2 platform-python[57616]: ansible-command Invoked with _raw_params=exec 1>&2 set -x set -o pipefail systemctl list-units --plain -l --all | grep quadlet || : systemctl list-unit-files --all | grep quadlet || : systemctl list-units --plain --failed -l --all | grep quadlet || : _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 12 12:40:40 managed-node2 platform-python[57746]: ansible-command Invoked with _raw_params=journalctl -ex warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None PLAY RECAP ********************************************************************* managed-node2 : ok=90 changed=8 unreachable=0 failed=2 skipped=140 rescued=2 ignored=0 SYSTEM ROLES ERRORS BEGIN v1 [ { "ansible_version": "2.9.27", "end_time": "2025-07-12T16:40:26.032684+00:00Z", "host": "managed-node2", "message": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "start_time": "2025-07-12T16:40:26.014302+00:00Z", "task_name": "Manage each secret", "task_path": "/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41" }, { "ansible_version": "2.9.27", "delta": "0:00:00.027579", "end_time": "2025-07-12 12:40:26.369978", "host": "managed-node2", "message": "No message could be found", "rc": 0, "start_time": "2025-07-12 12:40:26.342399", "stdout": "-- Logs begin at Sat 2025-07-12 12:29:00 EDT, end at Sat 2025-07-12 12:40:26 EDT. 
--\nJul 12 12:33:58 managed-node2 systemd[1]: Mounting FUSE Control File System...\n-- Subject: Unit sys-fs-fuse-connections.mount has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit sys-fs-fuse-connections.mount has begun starting up.\nJul 12 12:33:58 managed-node2 systemd[1]: Mounted FUSE Control File System.\n-- Subject: Unit sys-fs-fuse-connections.mount has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit sys-fs-fuse-connections.mount has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:33:59 managed-node2 dbus-daemon[601]: [system] Reloaded configuration\nJul 12 12:33:59 managed-node2 dbus-daemon[601]: [system] Reloaded configuration\nJul 12 12:34:23 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.\n-- Subject: Unit run-rb64140c001d3434a8ef1cd16a214e1b7.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit run-rb64140c001d3434a8ef1cd16a214e1b7.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:34:23 managed-node2 systemd[1]: cgroup compatibility translation between legacy and unified hierarchy settings activated. See cgroup-compat debug messages for details.\nJul 12 12:34:23 managed-node2 systemd[1]: Starting man-db-cache-update.service...\n-- Subject: Unit man-db-cache-update.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit man-db-cache-update.service has begun starting up.\nJul 12 12:34:23 managed-node2 systemd[1]: Reloading.\nJul 12 12:34:24 managed-node2 sudo[9345]: pam_unix(sudo:session): session closed for user root\nJul 12 12:34:25 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit man-db-cache-update.service has successfully entered the 'dead' state.\nJul 12 12:34:25 managed-node2 systemd[1]: Started man-db-cache-update.service.\n-- Subject: Unit man-db-cache-update.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit man-db-cache-update.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:34:25 managed-node2 systemd[1]: run-rb64140c001d3434a8ef1cd16a214e1b7.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-rb64140c001d3434a8ef1cd16a214e1b7.service has successfully entered the 'dead' state.\nJul 12 12:34:25 managed-node2 platform-python[11899]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:34:26 managed-node2 platform-python[12028]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 12 12:34:26 managed-node2 platform-python[12152]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:34:28 managed-node2 platform-python[12277]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:34:28 managed-node2 platform-python[12400]: 
ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:34:29 managed-node2 platform-python[12523]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:34:29 managed-node2 platform-python[12647]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:34:32 managed-node2 platform-python[12770]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 12 12:34:33 managed-node2 platform-python[12897]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 12 12:34:33 managed-node2 systemd[1]: Reloading.\nJul 12 12:34:33 managed-node2 systemd[1]: Starting firewalld - dynamic firewall daemon...\n-- Subject: Unit firewalld.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit firewalld.service has begun starting up.\nJul 12 12:34:34 managed-node2 systemd[1]: Started firewalld - dynamic firewall daemon.\n-- Subject: Unit firewalld.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit firewalld.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:34:34 managed-node2 firewalld[12934]: WARNING: AllowZoneDrifting is enabled. This is considered an insecure configuration option. It will be removed in a future release. 
Please consider disabling it now.\nJul 12 12:34:35 managed-node2 platform-python[13124]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 12 12:34:36 managed-node2 platform-python[13247]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:34:36 managed-node2 platform-python[13370]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:34:37 managed-node2 platform-python[13493]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:34:40 managed-node2 platform-python[13616]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:34:42 managed-node2 platform-python[13739]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:34:45 managed-node2 dbus-daemon[601]: [system] Reloaded configuration\nJul 12 12:34:45 managed-node2 dbus-daemon[601]: [system] Reloaded configuration\nJul 12 12:34:45 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.\n-- Subject: Unit run-rc4136976cbe94ee39dd82aa6d795790f.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit run-rc4136976cbe94ee39dd82aa6d795790f.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:34:45 managed-node2 systemd[1]: Starting man-db-cache-update.service...\n-- Subject: Unit man-db-cache-update.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit man-db-cache-update.service has begun starting up.\nJul 12 12:34:46 managed-node2 systemd[1]: 
man-db-cache-update.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit man-db-cache-update.service has successfully entered the 'dead' state.\nJul 12 12:34:46 managed-node2 systemd[1]: Started man-db-cache-update.service.\n-- Subject: Unit man-db-cache-update.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit man-db-cache-update.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:34:46 managed-node2 systemd[1]: run-rc4136976cbe94ee39dd82aa6d795790f.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-rc4136976cbe94ee39dd82aa6d795790f.service has successfully entered the 'dead' state.\nJul 12 12:34:46 managed-node2 platform-python[14345]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 12 12:34:47 managed-node2 platform-python[14493]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:34:48 managed-node2 platform-python[14617]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 12 12:34:49 managed-node2 kernel: SELinux: Converting 460 SID table entries...\nJul 12 12:34:49 managed-node2 kernel: SELinux: policy capability network_peer_controls=1\nJul 12 12:34:49 managed-node2 kernel: SELinux: policy capability open_perms=1\nJul 12 12:34:49 managed-node2 kernel: SELinux: policy capability extended_socket_class=1\nJul 12 12:34:49 managed-node2 kernel: SELinux: policy capability always_check_network=0\nJul 12 12:34:49 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1\nJul 12 12:34:49 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1\nJul 12 12:34:49 managed-node2 dbus-daemon[601]: [system] Reloaded configuration\nJul 12 12:34:50 managed-node2 platform-python[14744]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 12 12:34:55 managed-node2 platform-python[14867]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:34:57 managed-node2 platform-python[14992]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:34:57 managed-node2 platform-python[15115]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:34:57 managed-node2 platform-python[15238]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False 
get_mime=True get_attributes=True\nJul 12 12:34:58 managed-node2 platform-python[15337]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752338097.5592623-9962-32376786540712/source _original_basename=tmp571i0p6f follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None\nJul 12 12:34:58 managed-node2 platform-python[15462]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:34:58 managed-node2 kernel: evm: overlay not supported\nJul 12 12:34:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\\x2dcheck2773103887-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-metacopy\\x2dcheck2773103887-merged.mount has successfully entered the 'dead' state.\nJul 12 12:34:59 managed-node2 systemd[1]: Created slice machine.slice.\n-- Subject: Unit machine.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:34:59 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice.\n-- Subject: Unit machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:34:59 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:35:03 managed-node2 platform-python[15788]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:35:04 managed-node2 platform-python[15917]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:35:07 managed-node2 platform-python[16042]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True 
security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:35:11 managed-node2 platform-python[16165]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 12 12:35:11 managed-node2 platform-python[16292]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 12 12:35:12 managed-node2 platform-python[16419]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 12 12:35:14 managed-node2 platform-python[16542]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:35:17 managed-node2 platform-python[16665]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:35:19 managed-node2 platform-python[16788]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:35:22 managed-node2 platform-python[16911]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 12 12:35:24 managed-node2 platform-python[17059]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 12 12:35:25 managed-node2 platform-python[17182]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 12 12:35:30 managed-node2 platform-python[17305]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True 
get_attributes=True checksum_algorithm=sha1\nJul 12 12:35:31 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:35:32 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:35:32 managed-node2 platform-python[17567]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:35:32 managed-node2 platform-python[17690]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:35:33 managed-node2 platform-python[17813]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 12 12:35:33 managed-node2 platform-python[17912]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752338133.0421798-11440-200522690369055/source _original_basename=tmpx4spj4rr follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None\nJul 12 12:35:34 managed-node2 platform-python[18037]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:35:34 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice.\n-- Subject: Unit machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:35:34 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The 
unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:35:37 managed-node2 platform-python[18324]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:35:38 managed-node2 platform-python[18453]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:35:40 managed-node2 platform-python[18578]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:35:43 managed-node2 platform-python[18701]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 12 12:35:44 managed-node2 platform-python[18828]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 12 12:35:45 managed-node2 platform-python[18955]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 12 12:35:47 managed-node2 platform-python[19078]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:35:50 managed-node2 platform-python[19201]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:35:52 managed-node2 platform-python[19324]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True 
install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:35:55 managed-node2 platform-python[19447]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 12 12:35:57 managed-node2 platform-python[19595]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 12 12:35:58 managed-node2 platform-python[19718]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 12 12:36:02 managed-node2 platform-python[19841]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:36:03 managed-node2 platform-python[19966]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:36:04 managed-node2 platform-python[20090]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 12 12:36:04 managed-node2 platform-python[20217]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:36:05 managed-node2 platform-python[20342]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:36:05 managed-node2 platform-python[20342]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/nopull.yml\nJul 12 12:36:05 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice.\n-- Subject: Unit machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice has finished shutting down.\nJul 12 12:36:05 managed-node2 systemd[1]: machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice: Consumed 0 CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice completed and consumed the indicated resources.\nJul 12 12:36:05 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit 
var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:36:05 managed-node2 platform-python[20480]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:36:06 managed-node2 platform-python[20603]: ansible-command Invoked with _raw_params=podman image prune -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:36:09 managed-node2 platform-python[20858]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:36:10 managed-node2 platform-python[20987]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:36:13 managed-node2 platform-python[21112]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:36:16 managed-node2 platform-python[21235]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 12 12:36:17 managed-node2 platform-python[21362]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 12 12:36:18 managed-node2 platform-python[21489]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 12 12:36:19 managed-node2 platform-python[21612]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:36:22 managed-node2 
platform-python[21735]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:36:25 managed-node2 platform-python[21858]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:36:28 managed-node2 platform-python[21981]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 12 12:36:30 managed-node2 platform-python[22129]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 12 12:36:31 managed-node2 platform-python[22252]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 12 12:36:35 managed-node2 platform-python[22375]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:36:36 managed-node2 platform-python[22500]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:36:37 managed-node2 platform-python[22624]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 12 12:36:37 managed-node2 platform-python[22751]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:36:38 managed-node2 platform-python[22876]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:36:38 managed-node2 platform-python[22876]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/bogus.yml\nJul 12 12:36:38 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice.\n-- Subject: Unit machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice has finished shutting down\n-- Defined-By: systemd\n-- 
Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice has finished shutting down.\nJul 12 12:36:38 managed-node2 systemd[1]: machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice: Consumed 0 CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice completed and consumed the indicated resources.\nJul 12 12:36:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:36:38 managed-node2 platform-python[23015]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:36:39 managed-node2 platform-python[23138]: ansible-command Invoked with _raw_params=podman image prune -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:36:39 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:36:42 managed-node2 platform-python[23394]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:36:44 managed-node2 platform-python[23523]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:36:46 managed-node2 platform-python[23648]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:36:50 managed-node2 platform-python[23771]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 12 12:36:50 managed-node2 platform-python[23898]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 12 12:36:51 managed-node2 platform-python[24025]: 
ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 12 12:36:52 managed-node2 platform-python[24148]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:36:55 managed-node2 platform-python[24271]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:36:58 managed-node2 platform-python[24394]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:37:01 managed-node2 platform-python[24517]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 12 12:37:03 managed-node2 platform-python[24665]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 12 12:37:04 managed-node2 platform-python[24788]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 12 12:37:08 managed-node2 platform-python[24911]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 12 12:37:09 managed-node2 platform-python[25036]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:37:09 managed-node2 platform-python[25161]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:10 managed-node2 platform-python[25285]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None 
executable=None creates=None removes=None stdin=None\nJul 12 12:37:11 managed-node2 platform-python[25409]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:12 managed-node2 platform-python[25533]: ansible-command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None\nJul 12 12:37:12 managed-node2 systemd[1]: Created slice User Slice of UID 3001.\n-- Subject: Unit user-3001.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-3001.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:12 managed-node2 systemd[1]: Starting User runtime directory /run/user/3001...\n-- Subject: Unit user-runtime-dir@3001.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-runtime-dir@3001.service has begun starting up.\nJul 12 12:37:12 managed-node2 systemd[1]: Started User runtime directory /run/user/3001.\n-- Subject: Unit user-runtime-dir@3001.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-runtime-dir@3001.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:12 managed-node2 systemd[1]: Starting User Manager for UID 3001...\n-- Subject: Unit user@3001.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user@3001.service has begun starting up.\nJul 12 12:37:12 managed-node2 systemd[25539]: pam_unix(systemd-user:session): session opened for user podman_basic_user by (uid=0)\nJul 12 12:37:12 managed-node2 systemd[25539]: Reached target Paths.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:12 managed-node2 systemd[25539]: Started Mark boot as successful after the user session has run 2 minutes.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:12 managed-node2 systemd[25539]: Starting D-Bus User Message Bus Socket.\n-- Subject: Unit UNIT has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has begun starting up.\nJul 12 12:37:12 managed-node2 systemd[25539]: Reached target Timers.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:12 managed-node2 systemd[25539]: Listening on D-Bus User Message Bus Socket.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:12 managed-node2 systemd[25539]: Reached target Sockets.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has 
finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:12 managed-node2 systemd[25539]: Reached target Basic System.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:12 managed-node2 systemd[25539]: Reached target Default.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:12 managed-node2 systemd[25539]: Startup finished in 28ms.\n-- Subject: User manager start-up is now complete\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The user manager instance for user 3001 has been started. All services queued\n-- for starting have been started. Note that other services might still be starting\n-- up or be started at any later time.\n-- \n-- Startup of the manager took 28872 microseconds.\nJul 12 12:37:12 managed-node2 systemd[1]: Started User Manager for UID 3001.\n-- Subject: Unit user@3001.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user@3001.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:13 managed-node2 platform-python[25674]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:37:13 managed-node2 platform-python[25797]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:37:13 managed-node2 sudo[25920]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-flowetcsnhyltwcqlvhwzynouopxqrjl ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338233.6175296-15753-169052845046334/AnsiballZ_podman_image.py'\nJul 12 12:37:13 managed-node2 sudo[25920]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:37:14 managed-node2 systemd[25539]: Started D-Bus User Message Bus.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:14 managed-node2 systemd[25539]: Created slice user.slice.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:14 managed-node2 
systemd[25539]: Started podman-25932.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:14 managed-node2 systemd[25539]: Started podman-pause-5a039c99.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:14 managed-node2 systemd[25539]: Started podman-25948.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:14 managed-node2 systemd[25539]: Started podman-25963.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:14 managed-node2 sudo[25920]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:37:15 managed-node2 platform-python[26093]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:37:15 managed-node2 platform-python[26216]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:37:16 managed-node2 platform-python[26339]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 12 12:37:16 managed-node2 platform-python[26438]: ansible-copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752338235.9099538-15874-160470408042927/source _original_basename=tmphfu4mgeo follow=False checksum=effe6499c246b4e7daac7803b02ca2cad861ad5c backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None\nJul 12 12:37:16 managed-node2 sudo[26563]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ggfotomaneyfnrfutjcomejzhhvgfhsm ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338236.608599-15911-85925477640473/AnsiballZ_podman_play.py'\nJul 12 12:37:16 managed-node2 sudo[26563]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:37:16 managed-node2 platform-python[26566]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug 
kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:37:16 managed-node2 systemd[25539]: Started podman-26574.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:17 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6\nJul 12 12:37:17 managed-node2 systemd[25539]: Started rootless-netns-cfbb367e.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:17 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this.\nJul 12 12:37:17 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth14aad36c: link is not ready\nJul 12 12:37:17 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered blocking state\nJul 12 12:37:17 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered disabled state\nJul 12 12:37:17 managed-node2 kernel: device veth14aad36c entered promiscuous mode\nJul 12 12:37:17 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready\nJul 12 12:37:17 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready\nJul 12 12:37:17 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth14aad36c: link becomes ready\nJul 12 12:37:17 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered blocking state\nJul 12 12:37:17 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered forwarding state\nJul 12 12:37:17 managed-node2 dnsmasq[26760]: listening on cni-podman1(#3): 10.89.0.1\nJul 12 12:37:17 managed-node2 dnsmasq[26762]: started, version 2.79 cachesize 150\nJul 12 12:37:17 managed-node2 dnsmasq[26762]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify\nJul 12 12:37:17 managed-node2 dnsmasq[26762]: using local addresses only for domain dns.podman\nJul 12 12:37:17 managed-node2 dnsmasq[26762]: reading /etc/resolv.conf\nJul 12 12:37:17 managed-node2 dnsmasq[26762]: using local addresses only for domain dns.podman\nJul 12 12:37:17 managed-node2 dnsmasq[26762]: using nameserver 10.0.2.3#53\nJul 12 12:37:17 managed-node2 dnsmasq[26762]: using nameserver 10.29.169.13#53\nJul 12 12:37:17 managed-node2 dnsmasq[26762]: using nameserver 10.29.170.12#53\nJul 12 12:37:17 managed-node2 dnsmasq[26762]: using nameserver 10.2.32.1#53\nJul 12 12:37:17 managed-node2 dnsmasq[26762]: read /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 12 12:37:17 managed-node2 conmon[26776]: conmon db962b9f1559ffd15c96 : failed to write to /proc/self/oom_score_adj: Permission denied\nJul 12 12:37:17 managed-node2 conmon[26777]: conmon db962b9f1559ffd15c96 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach}\nJul 12 12:37:17 managed-node2 conmon[26777]: conmon db962b9f1559ffd15c96 : 
terminal_ctrl_fd: 14\nJul 12 12:37:17 managed-node2 conmon[26777]: conmon db962b9f1559ffd15c96 : winsz read side: 17, winsz write side: 18\nJul 12 12:37:17 managed-node2 conmon[26777]: conmon db962b9f1559ffd15c96 : container PID: 26787\nJul 12 12:37:17 managed-node2 conmon[26797]: conmon 8b812a2ec55f9de0cde0 : failed to write to /proc/self/oom_score_adj: Permission denied\nJul 12 12:37:17 managed-node2 conmon[26798]: conmon 8b812a2ec55f9de0cde0 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach}\nJul 12 12:37:17 managed-node2 conmon[26798]: conmon 8b812a2ec55f9de0cde0 : terminal_ctrl_fd: 13\nJul 12 12:37:17 managed-node2 conmon[26798]: conmon 8b812a2ec55f9de0cde0 : winsz read side: 16, winsz write side: 17\nJul 12 12:37:17 managed-node2 conmon[26798]: conmon 8b812a2ec55f9de0cde0 : container PID: 26808\nJul 12 12:37:17 managed-node2 platform-python[26566]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 12 12:37:17 managed-node2 platform-python[26566]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod:\n 49a038584fa170b91928655aabc3334b950d706880329a96a99914f26bc7c59d\n Container:\n 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\n \nJul 12 12:37:17 managed-node2 platform-python[26566]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-12T12:37:16-04:00\" level=info msg=\"/bin/podman filtering at log level debug\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-12T12:37:16-04:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Initializing event backend file\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug 
msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\n time=\"2025-07-12T12:37:16-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Successfully loaded 1 networks\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"found free device name cni-podman1\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"found free ipv4 network subnet 10.89.0.0/24\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-12 12:37:16.97600692 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"reference \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\" does not resolve to an image ID\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"reference \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\" does not resolve to an image ID\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"FROM \\\"scratch\\\"\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Cached value indicated that idmapped mounts for overlay are not supported\"\n 
time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Check for idmapped mounts support \"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"overlay: test mount indicated that volatile is being used\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263/work,userxattr,volatile,context=\\\"system_u:object_r:container_file_t:s0:c99,c874\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Container ID: 74b081262df1d810c422dbcbe1db2f5a2adc384492d57cda98cbd9e90ab37ee1\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\\\"\\\", Src:[]string{\\\"/usr/libexec/podman/catatonit\\\"}, Dest:\\\"/catatonit\\\", Download:false, Chown:\\\"\\\", Chmod:\\\"\\\", Checksum:\\\"\\\", Files:[]imagebuilder.File(nil)}\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"added content file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]}\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"COMMIT localhost/podman-pause:4.9.4-dev-1708535009\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"COMMIT \\\"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"committing image with reference \\\"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\" is allowed by policy\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"layer list: [\\\"221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263\\\"]\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"using \\\"/var/tmp/buildah1838958819\\\" to hold temporary data\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Tar with options on 
/home/podman_basic_user/.local/share/containers/storage/overlay/221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263/diff\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"layer \\\"221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263\\\" size is 767488 bytes, uncompressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690, possibly-compressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"OCIv1 config = {\\\"created\\\":\\\"2025-07-12T16:37:17.118933835Z\\\",\\\"architecture\\\":\\\"amd64\\\",\\\"os\\\":\\\"linux\\\",\\\"config\\\":{\\\"Env\\\":[\\\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\\\"],\\\"Entrypoint\\\":[\\\"/catatonit\\\",\\\"-P\\\"],\\\"Labels\\\":{\\\"io.buildah.version\\\":\\\"1.33.5\\\"}},\\\"rootfs\\\":{\\\"type\\\":\\\"layers\\\",\\\"diff_ids\\\":[\\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\"]},\\\"history\\\":[{\\\"created\\\":\\\"2025-07-12T16:37:17.118347731Z\\\",\\\"created_by\\\":\\\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \\\",\\\"empty_layer\\\":true},{\\\"created\\\":\\\"2025-07-12T16:37:17.122165868Z\\\",\\\"created_by\\\":\\\"/bin/sh -c #(nop) ENTRYPOINT [\\\\\\\"/catatonit\\\\\\\", \\\\\\\"-P\\\\\\\"]\\\"}]}\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"OCIv1 manifest = {\\\"schemaVersion\\\":2,\\\"mediaType\\\":\\\"application/vnd.oci.image.manifest.v1+json\\\",\\\"config\\\":{\\\"mediaType\\\":\\\"application/vnd.oci.image.config.v1+json\\\",\\\"digest\\\":\\\"sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\",\\\"size\\\":668},\\\"layers\\\":[{\\\"mediaType\\\":\\\"application/vnd.oci.image.layer.v1.tar\\\",\\\"digest\\\":\\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\",\\\"size\\\":767488}],\\\"annotations\\\":{\\\"org.opencontainers.image.base.digest\\\":\\\"\\\",\\\"org.opencontainers.image.base.name\\\":\\\"\\\"}}\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Docker v2s2 config = 
{\\\"created\\\":\\\"2025-07-12T16:37:17.118933835Z\\\",\\\"container\\\":\\\"74b081262df1d810c422dbcbe1db2f5a2adc384492d57cda98cbd9e90ab37ee1\\\",\\\"container_config\\\":{\\\"Hostname\\\":\\\"\\\",\\\"Domainname\\\":\\\"\\\",\\\"User\\\":\\\"\\\",\\\"AttachStdin\\\":false,\\\"AttachStdout\\\":false,\\\"AttachStderr\\\":false,\\\"Tty\\\":false,\\\"OpenStdin\\\":false,\\\"StdinOnce\\\":false,\\\"Env\\\":[\\\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\\\"],\\\"Cmd\\\":[],\\\"Image\\\":\\\"\\\",\\\"Volumes\\\":{},\\\"WorkingDir\\\":\\\"\\\",\\\"Entrypoint\\\":[\\\"/catatonit\\\",\\\"-P\\\"],\\\"OnBuild\\\":[],\\\"Labels\\\":{\\\"io.buildah.version\\\":\\\"1.33.5\\\"}},\\\"config\\\":{\\\"Hostname\\\":\\\"\\\",\\\"Domainname\\\":\\\"\\\",\\\"User\\\":\\\"\\\",\\\"AttachStdin\\\":false,\\\"AttachStdout\\\":false,\\\"AttachStderr\\\":false,\\\"Tty\\\":false,\\\"OpenStdin\\\":false,\\\"StdinOnce\\\":false,\\\"Env\\\":[\\\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\\\"],\\\"Cmd\\\":[],\\\"Image\\\":\\\"\\\",\\\"Volumes\\\":{},\\\"WorkingDir\\\":\\\"\\\",\\\"Entrypoint\\\":[\\\"/catatonit\\\",\\\"-P\\\"],\\\"OnBuild\\\":[],\\\"Labels\\\":{\\\"io.buildah.version\\\":\\\"1.33.5\\\"}},\\\"architecture\\\":\\\"amd64\\\",\\\"os\\\":\\\"linux\\\",\\\"rootfs\\\":{\\\"type\\\":\\\"layers\\\",\\\"diff_ids\\\":[\\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\"]},\\\"history\\\":[{\\\"created\\\":\\\"2025-07-12T16:37:17.118347731Z\\\",\\\"created_by\\\":\\\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \\\",\\\"empty_layer\\\":true},{\\\"created\\\":\\\"2025-07-12T16:37:17.122165868Z\\\",\\\"created_by\\\":\\\"/bin/sh -c #(nop) ENTRYPOINT [\\\\\\\"/catatonit\\\\\\\", \\\\\\\"-P\\\\\\\"]\\\"}]}\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Docker v2s2 manifest = {\\\"schemaVersion\\\":2,\\\"mediaType\\\":\\\"application/vnd.docker.distribution.manifest.v2+json\\\",\\\"config\\\":{\\\"mediaType\\\":\\\"application/vnd.docker.container.image.v1+json\\\",\\\"size\\\":1342,\\\"digest\\\":\\\"sha256:706c7e5b14dda8248bcff3ec5c250761bd8f764535609aa9365ce9e4b43361c2\\\"},\\\"layers\\\":[{\\\"mediaType\\\":\\\"application/vnd.docker.image.rootfs.diff.tar\\\",\\\"size\\\":767488,\\\"digest\\\":\\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\"}]}\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"IsRunningImageAllowed for image containers-storage:\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\" Using transport \\\"containers-storage\\\" policy section \"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\" Requirement 0: allowed\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Overall: allowed\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"start reading config\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"finished reading config\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug 
msg=\"... will first try using the original manifest unmodified\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Checking if we can reuse blob sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690: general substitution = true, compression for MIME type \\\"application/vnd.oci.image.layer.v1.tar\\\" = true\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"reading layer \\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"No compression detected\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Using original blob without modification\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690/diff\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"finished reading layer \\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"No compression detected\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Compression change for blob sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566 (\\\"application/vnd.oci.image.config.v1+json\\\") not supported\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Using original blob without modification\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"setting image creation date to 2025-07-12 16:37:17.118933835 +0000 UTC\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"created new image ID \\\"948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\" with metadata \\\"{}\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"added name \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" to image \\\"948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"printing final image id \\\"948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Got pod cgroup as /libpod_parent/49a038584fa170b91928655aabc3334b950d706880329a96a99914f26bc7c59d\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as 
\\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566)\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Inspecting image 948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Inspecting image 948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Inspecting image 948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"setting container name 49a038584fa1-infra\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Allocated lock 1 for container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Created container \\\"db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Container \\\"db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\\\" has work directory \\\"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Container \\\"db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\\\" has run directory \\\"/run/user/3001/containers/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as 
\\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Pulling image quay.io/libpod/testimage:20210610 (policy: missing)\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob 
\\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"adding container to pod httpd1\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"setting container name httpd1-httpd1\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=info msg=\"Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Adding mount /proc\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Adding mount /dev\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Adding mount /dev/pts\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Adding mount /dev/mqueue\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Adding mount /sys\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Adding mount /sys/fs/cgroup\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Allocated lock 2 for container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"parsed reference into 
\\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Created container \\\"8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Container \\\"8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\\\" has work directory \\\"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Container \\\"8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\\\" has run directory \\\"/run/user/3001/containers/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Strongconnecting node db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Pushed db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 onto stack\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Finishing node db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70. Popped db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 off stack\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Strongconnecting node 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Pushed 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 onto stack\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Finishing node 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057. 
Popped 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 off stack\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/OM4I4NAT7NV6G6FUUDQFTEASSZ,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/a85a1e9326b5058f772b4fc440996b4ba136c02bad6966e471895a60f615298c/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/a85a1e9326b5058f772b4fc440996b4ba136c02bad6966e471895a60f615298c/work,userxattr,context=\\\"system_u:object_r:container_file_t:s0:c277,c351\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Mounted container \\\"db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\\\" at \\\"/home/podman_basic_user/.local/share/containers/storage/overlay/a85a1e9326b5058f772b4fc440996b4ba136c02bad6966e471895a60f615298c/merged\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Created root filesystem for container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 at /home/podman_basic_user/.local/share/containers/storage/overlay/a85a1e9326b5058f772b4fc440996b4ba136c02bad6966e471895a60f615298c/merged\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Made network namespace at /run/user/3001/netns/netns-d0ac84ca-ca87-3466-1642-2cff38531036 for container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"creating rootless network namespace with name \\\"rootless-netns-d22c9f230d0691b8f418\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"slirp4netns command: /bin/slirp4netns --disable-host-loopback --mtu=65520 --enable-sandbox --enable-seccomp --enable-ipv6 -c -r 3 --netns-type=path /run/user/3001/netns/rootless-netns-d22c9f230d0691b8f418 tap0\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"The path of /etc/resolv.conf in the mount ns is \\\"/etc/resolv.conf\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"cni result for container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:0a:fa:9a:36:b8:3a Sandbox:} {Name:veth14aad36c Mac:82:8b:99:b5:f7:b0 Sandbox:} {Name:eth0 Mac:b6:e7:40:6d:da:9c Sandbox:/run/user/3001/netns/netns-d0ac84ca-ca87-3466-1642-2cff38531036}] [{Version:4 Interface:0xc0008e9188 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-12T12:37:17-04:00\\\" level=info msg=\\\"Starting parent driver\\\"\\ntime=\\\"2025-07-12T12:37:17-04:00\\\" level=info msg=\\\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport4142254753/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport4142254753/.bp.sock]\\\"\\ntime=\\\"2025-07-12T12:37:17-04:00\\\" level=info msg=\\\"Starting child driver in child netns (\\\\\\\"/proc/self/exe\\\\\\\" [rootlessport-child])\\\"\\n\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-12T12:37:17-04:00\\\" level=info msg=\\\"Waiting for initComplete\\\"\\n\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-12T12:37:17-04:00\\\" level=info msg=\\\"initComplete is closed; parent and child established the communication channel\\\"\\n\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"rootlessport: 
time=\\\"2025-07-12T12:37:17-04:00\\\" level=info msg=\\\"Exposing ports [{ 80 15001 1 tcp}]\\\"\\n\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"rootlessport is ready\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-12T12:37:17-04:00\\\" level=info msg=Ready\\n\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"/etc/system-fips does not exist on host, not mounting FIPS mode subscription\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Workdir \\\"/\\\" resolved to host path \\\"/home/podman_basic_user/.local/share/containers/storage/overlay/a85a1e9326b5058f772b4fc440996b4ba136c02bad6966e471895a60f615298c/merged\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Created OCI spec for container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata/config.json\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Got pod cgroup as \"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 -u db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata -p /run/user/3001/containers/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata/pidfile -n 49a038584fa1-infra --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70]\"\n time=\"2025-07-12T12:37:17-04:00\" level=info msg=\"Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for cpu: mkdir /sys/fs/cgroup/cpu/libpod_parent: permission denied\"\n [conmon:d]: 
failed to write to /proc/self/oom_score_adj: Permission denied\n \n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Received: 26787\"\n time=\"2025-07-12T12:37:17-04:00\" level=info msg=\"Got Conmon PID as 26777\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Created container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 in OCI runtime\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Adding nameserver(s) from network status of '[\\\"10.89.0.1\\\"]'\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Adding search domain(s) from network status of '[\\\"dns.podman\\\"]'\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Starting container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 with command [/catatonit -P]\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Started container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/SGU47AVGSROXANDACX3GODEDPF,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/1f9869d8c46f1dd99255378c38742ad9514c5c2f608c50250883573ed9f20064/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/1f9869d8c46f1dd99255378c38742ad9514c5c2f608c50250883573ed9f20064/work,userxattr,context=\\\"system_u:object_r:container_file_t:s0:c277,c351\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Mounted container \\\"8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\\\" at \\\"/home/podman_basic_user/.local/share/containers/storage/overlay/1f9869d8c46f1dd99255378c38742ad9514c5c2f608c50250883573ed9f20064/merged\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Created root filesystem for container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 at /home/podman_basic_user/.local/share/containers/storage/overlay/1f9869d8c46f1dd99255378c38742ad9514c5c2f608c50250883573ed9f20064/merged\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"/etc/system-fips does not exist on host, not mounting FIPS mode subscription\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Workdir \\\"/var/www\\\" resolved to a volume or mount\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Created OCI spec for container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata/config.json\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Got pod cgroup as \"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 -u 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata -p /run/user/3001/containers/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l 
k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057]\"\n time=\"2025-07-12T12:37:17-04:00\" level=info msg=\"Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for cpu: mkdir /sys/fs/cgroup/cpu/conmon: permission denied\"\n [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied\n \n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Received: 26808\"\n time=\"2025-07-12T12:37:17-04:00\" level=info msg=\"Got Conmon PID as 26798\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Created container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 in OCI runtime\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Starting container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 with command [/bin/busybox-extras httpd -f -p 80]\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Started container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Shutting down engines\"\nJul 12 12:37:17 managed-node2 platform-python[26566]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 12 12:37:17 managed-node2 sudo[26563]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:37:18 managed-node2 sudo[26939]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yxngcsmbouppolsnchwedyvvmqwcqmcp ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338237.9288857-15948-265909207589811/AnsiballZ_systemd.py'\nJul 12 12:37:18 managed-node2 sudo[26939]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:37:18 managed-node2 platform-python[26942]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None\nJul 12 12:37:18 managed-node2 systemd[25539]: 
Reloading.\nJul 12 12:37:18 managed-node2 sudo[26939]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:37:18 managed-node2 sudo[27076]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jxciagckwyaiwverlxxxicxpjcaamzpb ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338238.5066783-15974-208582760237043/AnsiballZ_systemd.py'\nJul 12 12:37:18 managed-node2 sudo[27076]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:37:18 managed-node2 dnsmasq[26762]: listening on cni-podman1(#3): fe80::8fa:9aff:fe36:b83a%cni-podman1\nJul 12 12:37:18 managed-node2 platform-python[27079]: ansible-systemd Invoked with name= scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None\nJul 12 12:37:18 managed-node2 systemd[25539]: Reloading.\nJul 12 12:37:18 managed-node2 sudo[27076]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:37:19 managed-node2 sudo[27215]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aeingsotugnwsviddfzcxglibrontkpg ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338239.1567352-16007-20750914035253/AnsiballZ_systemd.py'\nJul 12 12:37:19 managed-node2 sudo[27215]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:37:19 managed-node2 platform-python[27218]: ansible-systemd Invoked with name= scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None\nJul 12 12:37:19 managed-node2 systemd[25539]: Created slice podman\\x2dkube.slice.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:19 managed-node2 systemd[25539]: Starting A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit UNIT has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has begun starting up.\nJul 12 12:37:19 managed-node2 conmon[26798]: conmon 8b812a2ec55f9de0cde0 : container 26808 exited with status 137\nJul 12 12:37:19 managed-node2 conmon[26777]: conmon db962b9f1559ffd15c96 : container 26787 exited with status 137\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057)\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: 
time=\"2025-07-12T12:37:19-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70)\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using transient store: false\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Initializing event backend file\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: 
time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using transient store: false\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" 
level=debug msg=\"Cached value indicated that metacopy is not being used\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Initializing event backend file\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057)\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Shutting down engines\"\nJul 12 12:37:19 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered disabled state\nJul 12 12:37:19 managed-node2 kernel: device veth14aad36c left promiscuous mode\nJul 12 12:37:19 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered disabled state\nJul 12 12:37:19 
managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70)\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Shutting down engines\"\nJul 12 12:37:19 managed-node2 podman[27224]: Pods stopped:\nJul 12 12:37:19 managed-node2 podman[27224]: 49a038584fa170b91928655aabc3334b950d706880329a96a99914f26bc7c59d\nJul 12 12:37:19 managed-node2 podman[27224]: Pods removed:\nJul 12 12:37:19 managed-node2 podman[27224]: 49a038584fa170b91928655aabc3334b950d706880329a96a99914f26bc7c59d\nJul 12 12:37:19 managed-node2 podman[27224]: Secrets removed:\nJul 12 12:37:19 managed-node2 podman[27224]: Volumes removed:\nJul 12 12:37:20 managed-node2 systemd[25539]: Started rootless-netns-910042d3.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:20 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth73ffc199: link is not ready\nJul 12 12:37:20 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered blocking state\nJul 12 12:37:20 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered disabled state\nJul 12 12:37:20 managed-node2 kernel: device veth73ffc199 entered promiscuous mode\nJul 12 12:37:20 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered blocking state\nJul 12 12:37:20 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered forwarding state\nJul 12 12:37:20 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth73ffc199: link becomes ready\nJul 12 12:37:20 managed-node2 dnsmasq[27470]: listening on cni-podman1(#3): 10.89.0.1\nJul 12 12:37:20 managed-node2 dnsmasq[27472]: started, version 2.79 cachesize 150\nJul 12 12:37:20 managed-node2 dnsmasq[27472]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify\nJul 12 12:37:20 managed-node2 dnsmasq[27472]: using local addresses only for domain dns.podman\nJul 12 12:37:20 managed-node2 dnsmasq[27472]: reading /etc/resolv.conf\nJul 12 12:37:20 managed-node2 dnsmasq[27472]: using local addresses only for domain dns.podman\nJul 12 12:37:20 managed-node2 dnsmasq[27472]: using nameserver 10.0.2.3#53\nJul 12 12:37:20 managed-node2 dnsmasq[27472]: using nameserver 10.29.169.13#53\nJul 12 12:37:20 managed-node2 dnsmasq[27472]: using nameserver 10.29.170.12#53\nJul 12 12:37:20 managed-node2 dnsmasq[27472]: using nameserver 10.2.32.1#53\nJul 12 12:37:20 managed-node2 dnsmasq[27472]: read /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 12 12:37:20 managed-node2 podman[27224]: Pod:\nJul 12 12:37:20 managed-node2 podman[27224]: e6000b000d16f866b55a1a19dd3fb3262c82c732a816136db57b7e8368f0a32a\nJul 12 12:37:20 managed-node2 podman[27224]: Container:\nJul 12 12:37:20 managed-node2 podman[27224]: 
3e84611729acf9a795f4d6223da39f911f01d8e5bb78d05b15144b66878ad807\nJul 12 12:37:20 managed-node2 systemd[25539]: Started A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:20 managed-node2 sudo[27215]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:37:21 managed-node2 platform-python[27649]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 12 12:37:21 managed-node2 platform-python[27773]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:37:22 managed-node2 dnsmasq[27472]: listening on cni-podman1(#3): fe80::c95:b4ff:fe67:d35c%cni-podman1\nJul 12 12:37:23 managed-node2 platform-python[27898]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:24 managed-node2 platform-python[28022]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:37:24 managed-node2 platform-python[28145]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:37:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:37:25 managed-node2 platform-python[28435]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:37:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:37:26 managed-node2 platform-python[28558]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None 
serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:37:26 managed-node2 platform-python[28681]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 12 12:37:27 managed-node2 platform-python[28780]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752338246.5734363-16367-230792965661198/source _original_basename=tmpcx3lufsl follow=False checksum=d1d2b75756121a76b51c55942528a638a8e19d00 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None\nJul 12 12:37:27 managed-node2 platform-python[28905]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:37:27 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice.\n-- Subject: Unit machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8210] manager: (cni-podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3)\nJul 12 12:37:27 managed-node2 systemd-udevd[28952]: Using default interface naming scheme 'rhel-8.0'.\nJul 12 12:37:27 managed-node2 systemd-udevd[28953]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.\nJul 12 12:37:27 managed-node2 systemd-udevd[28953]: Could not generate persistent MAC address for vetha808c72b: No such file or directory\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8294] manager: (vetha808c72b): new Veth device (/org/freedesktop/NetworkManager/Devices/4)\nJul 12 12:37:27 managed-node2 systemd-udevd[28952]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.\nJul 12 12:37:27 managed-node2 systemd-udevd[28952]: Could not generate persistent MAC address for cni-podman1: No such file or directory\nJul 12 12:37:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vetha808c72b: link is not ready\nJul 12 12:37:27 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered blocking state\nJul 12 12:37:27 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered disabled state\nJul 12 12:37:27 managed-node2 kernel: device vetha808c72b entered promiscuous mode\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8417] device (cni-podman1): state change: unmanaged -> unavailable 
(reason 'connection-assumed', sys-iface-state: 'external')\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8423] device (cni-podman1): state change: unavailable -> disconnected (reason 'connection-assumed', sys-iface-state: 'external')\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8433] device (cni-podman1): Activation: starting connection 'cni-podman1' (9399044c-ebcb-4319-aff1-7a172e94e2ea)\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8434] device (cni-podman1): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'external')\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8436] device (cni-podman1): state change: prepare -> config (reason 'none', sys-iface-state: 'external')\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8438] device (cni-podman1): state change: config -> ip-config (reason 'none', sys-iface-state: 'external')\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8440] device (cni-podman1): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'external')\nJul 12 12:37:27 managed-node2 dbus-daemon[601]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.5' (uid=0 pid=666 comm=\"/usr/sbin/NetworkManager --no-daemon \" label=\"system_u:system_r:NetworkManager_t:s0\")\nJul 12 12:37:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready\nJul 12 12:37:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready\nJul 12 12:37:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vetha808c72b: link becomes ready\nJul 12 12:37:27 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered blocking state\nJul 12 12:37:27 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered forwarding state\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8634] device (vetha808c72b): carrier: link connected\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8637] device (cni-podman1): carrier: link connected\nJul 12 12:37:27 managed-node2 systemd[1]: Starting Network Manager Script Dispatcher Service...\n-- Subject: Unit NetworkManager-dispatcher.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit NetworkManager-dispatcher.service has begun starting up.\nJul 12 12:37:27 managed-node2 dbus-daemon[601]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher'\nJul 12 12:37:27 managed-node2 systemd[1]: Started Network Manager Script Dispatcher Service.\n-- Subject: Unit NetworkManager-dispatcher.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit NetworkManager-dispatcher.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.9275] device (cni-podman1): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'external')\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.9277] device (cni-podman1): state change: secondaries -> activated (reason 'none', sys-iface-state: 'external')\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.9282] device (cni-podman1): Activation: successful, device activated.\nJul 12 12:37:28 managed-node2 dnsmasq[29076]: listening on cni-podman1(#3): 10.89.0.1\nJul 12 12:37:28 managed-node2 dnsmasq[29080]: started, 
version 2.79 cachesize 150\nJul 12 12:37:28 managed-node2 dnsmasq[29080]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify\nJul 12 12:37:28 managed-node2 dnsmasq[29080]: using local addresses only for domain dns.podman\nJul 12 12:37:28 managed-node2 dnsmasq[29080]: reading /etc/resolv.conf\nJul 12 12:37:28 managed-node2 dnsmasq[29080]: using local addresses only for domain dns.podman\nJul 12 12:37:28 managed-node2 dnsmasq[29080]: using nameserver 10.29.169.13#53\nJul 12 12:37:28 managed-node2 dnsmasq[29080]: using nameserver 10.29.170.12#53\nJul 12 12:37:28 managed-node2 dnsmasq[29080]: using nameserver 10.2.32.1#53\nJul 12 12:37:28 managed-node2 dnsmasq[29080]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 12 12:37:28 managed-node2 systemd[1]: Started libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope.\n-- Subject: Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:28 managed-node2 conmon[29082]: conmon 2bba312ac73f9a5e5631 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach}\nJul 12 12:37:28 managed-node2 conmon[29082]: conmon 2bba312ac73f9a5e5631 : terminal_ctrl_fd: 13\nJul 12 12:37:28 managed-node2 conmon[29082]: conmon 2bba312ac73f9a5e5631 : winsz read side: 17, winsz write side: 18\nJul 12 12:37:28 managed-node2 systemd[1]: Started libcontainer container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.\n-- Subject: Unit libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:28 managed-node2 conmon[29082]: conmon 2bba312ac73f9a5e5631 : container PID: 29092\nJul 12 12:37:28 managed-node2 systemd[1]: Started libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope.\n-- Subject: Unit libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:28 managed-node2 conmon[29103]: conmon dde5d521943a5269ac5d : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach}\nJul 12 12:37:28 managed-node2 conmon[29103]: conmon dde5d521943a5269ac5d : terminal_ctrl_fd: 12\nJul 12 12:37:28 managed-node2 conmon[29103]: conmon dde5d521943a5269ac5d : winsz read side: 16, winsz write side: 17\nJul 12 12:37:28 managed-node2 systemd[1]: Started libcontainer container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.\n-- Subject: Unit libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has finished starting 
up.\n-- \n-- The start-up result is done.\nJul 12 12:37:28 managed-node2 conmon[29103]: conmon dde5d521943a5269ac5d : container PID: 29114\nJul 12 12:37:28 managed-node2 platform-python[28905]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 12 12:37:28 managed-node2 platform-python[28905]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod:\n a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd\n Container:\n dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\n \nJul 12 12:37:28 managed-node2 platform-python[28905]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-12T12:37:27-04:00\" level=info msg=\"/usr/bin/podman filtering at log level debug\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\n time=\"2025-07-12T12:37:27-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Initializing event backend file\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI 
runtime runsc: invalid argument\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-12 12:34:58.774465298 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2)\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice for parent machine.slice and name libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux 
[] }\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2)\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Inspecting image 4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Inspecting image 4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Inspecting image 4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"setting container name a247d85c3822-infra\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Allocated lock 1 for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Cached value indicated that idmapped mounts for overlay are not supported\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Check for idmapped mounts support \"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Created container \\\"2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Container \\\"2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\\\" has work directory \\\"/var/lib/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Container \\\"2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\\\" has run directory \\\"/run/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata\\\"\"\n 
time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Pulling image quay.io/libpod/testimage:20210610 (policy: missing)\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n 
time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"adding container to pod httpd2\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"setting container name httpd2-httpd2\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=info msg=\"Sysctl net.ipv4.ping_group_range=0 0 ignored in 
containers.conf, since Network Namespace set to host\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Adding mount /proc\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Adding mount /dev\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Adding mount /dev/pts\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Adding mount /dev/mqueue\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Adding mount /sys\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Adding mount /sys/fs/cgroup\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Allocated lock 2 for container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Created container \\\"dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Container \\\"dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\\\" has work directory \\\"/var/lib/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Container \\\"dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\\\" has run directory \\\"/run/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Strongconnecting node dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Pushed dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 onto stack\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Recursing to successor node 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Strongconnecting node 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Pushed 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 onto stack\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Finishing node 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07. Popped 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 off stack\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Finishing node dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0. 
Popped dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 off stack\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/BPQ67IPF3U2MS7MKOAJ6EE5AVL,upperdir=/var/lib/containers/storage/overlay/110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0/diff,workdir=/var/lib/containers/storage/overlay/110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0/work,nodev,metacopy=on,context=\\\"system_u:object_r:container_file_t:s0:c20,c130\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Mounted container \\\"2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\\\" at \\\"/var/lib/containers/storage/overlay/110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0/merged\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Created root filesystem for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 at /var/lib/containers/storage/overlay/110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0/merged\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Made network namespace at /run/netns/netns-93660061-5819-4d54-dfec-784d954efe33 for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"cni result for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:7e:63:02:ee:ed:5c Sandbox:} {Name:vetha808c72b Mac:8a:e4:ca:d3:1c:60 Sandbox:} {Name:eth0 Mac:f2:ab:50:c0:43:48 Sandbox:/run/netns/netns-93660061-5819-4d54-dfec-784d954efe33}] [{Version:4 Interface:0xc0006632b8 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"/etc/system-fips does not exist on host, not mounting FIPS mode subscription\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Setting Cgroups for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 to machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice:libpod:2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Workdir \\\"/\\\" resolved to host path \\\"/var/lib/containers/storage/overlay/110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0/merged\\\"\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Created OCI spec for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 at /var/lib/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata/config.json\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice for parent machine.slice and name libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Got pod cgroup as 
machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 -u 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata -p /run/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata/pidfile -n a247d85c3822-infra --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07]\"\n time=\"2025-07-12T12:37:28-04:00\" level=info msg=\"Running conmon under slice machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice and unitName libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Received: 29092\"\n time=\"2025-07-12T12:37:28-04:00\" level=info msg=\"Got Conmon PID as 29082\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Created container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 in OCI runtime\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Adding nameserver(s) from network status of '[\\\"10.89.0.1\\\"]'\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Adding search domain(s) from network status of '[\\\"dns.podman\\\"]'\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Starting container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 with command [/catatonit -P]\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Started container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"overlay: 
mount_data=lowerdir=/var/lib/containers/storage/overlay/l/UMCCOJYMJQIWGK7MOUSAJGNIT3,upperdir=/var/lib/containers/storage/overlay/4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72/diff,workdir=/var/lib/containers/storage/overlay/4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72/work,nodev,metacopy=on,context=\\\"system_u:object_r:container_file_t:s0:c20,c130\\\"\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Mounted container \\\"dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\\\" at \\\"/var/lib/containers/storage/overlay/4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72/merged\\\"\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Created root filesystem for container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 at /var/lib/containers/storage/overlay/4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72/merged\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"/etc/system-fips does not exist on host, not mounting FIPS mode subscription\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Setting Cgroups for container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 to machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice:libpod:dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Workdir \\\"/var/www\\\" resolved to a volume or mount\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Created OCI spec for container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 at /var/lib/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata/config.json\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice for parent machine.slice and name libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 -u dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata -p /run/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata/conmon.pid 
--exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0]\"\n time=\"2025-07-12T12:37:28-04:00\" level=info msg=\"Running conmon under slice machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice and unitName libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Received: 29114\"\n time=\"2025-07-12T12:37:28-04:00\" level=info msg=\"Got Conmon PID as 29103\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Created container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 in OCI runtime\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Starting container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 with command [/bin/busybox-extras httpd -f -p 80]\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Started container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Shutting down engines\"\nJul 12 12:37:28 managed-node2 platform-python[28905]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 12 12:37:28 managed-node2 platform-python[29245]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None\nJul 12 12:37:28 managed-node2 systemd[1]: Reloading.\nJul 12 12:37:29 managed-node2 platform-python[29406]: ansible-systemd Invoked with name= scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None\nJul 12 12:37:29 managed-node2 systemd[1]: Reloading.\nJul 12 12:37:29 managed-node2 dnsmasq[29080]: listening on cni-podman1(#3): fe80::7c63:2ff:feee:ed5c%cni-podman1\nJul 12 12:37:30 managed-node2 platform-python[29569]: ansible-systemd Invoked with name= scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None\nJul 12 12:37:30 managed-node2 systemd[1]: Created slice system-podman\\x2dkube.slice.\n-- Subject: Unit system-podman\\x2dkube.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit system-podman\\x2dkube.slice has finished 
starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:30 managed-node2 systemd[1]: Starting A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun starting up.\nJul 12 12:37:30 managed-node2 conmon[29082]: conmon 2bba312ac73f9a5e5631 : container 29092 exited with status 137\nJul 12 12:37:30 managed-node2 systemd[1]: libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has successfully entered the 'dead' state.\nJul 12 12:37:30 managed-node2 systemd[1]: libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope: Consumed 31ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope completed and consumed the indicated resources.\nJul 12 12:37:30 managed-node2 conmon[29103]: conmon dde5d521943a5269ac5d : container 29114 exited with status 137\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07)\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 12 12:37:30 managed-node2 systemd[1]: libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has successfully entered the 'dead' state.\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 12 12:37:30 managed-node2 systemd[1]: libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope: Consumed 32ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope completed and consumed the indicated resources.\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" 
level=debug msg=\"Using graph root /var/lib/containers/storage\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using transient store: false\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Initializing event backend file\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI 
runtime kata: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0)\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using transient store: false\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: 
time=\"2025-07-12T12:37:30-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Initializing event backend file\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 12 12:37:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay-4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72-merged.mount has successfully entered the 'dead' state.\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0)\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: 
time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Shutting down engines\"\nJul 12 12:37:30 managed-node2 systemd[1]: libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has successfully entered the 'dead' state.\nJul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered disabled state\nJul 12 12:37:30 managed-node2 kernel: device vetha808c72b left promiscuous mode\nJul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered disabled state\nJul 12 12:37:30 managed-node2 systemd[1]: run-netns-netns\\x2d93660061\\x2d5819\\x2d4d54\\x2ddfec\\x2d784d954efe33.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-netns-netns\\x2d93660061\\x2d5819\\x2d4d54\\x2ddfec\\x2d784d954efe33.mount has successfully entered the 'dead' state.\nJul 12 12:37:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07-userdata-shm.mount has successfully entered the 'dead' state.\nJul 12 12:37:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay-110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0-merged.mount has successfully entered the 'dead' state.\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07)\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Shutting down engines\"\nJul 12 12:37:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:37:30 managed-node2 systemd[1]: Stopping libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope.\n-- Subject: Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has begun shutting 
down.\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=info msg=\"Received shutdown signal \\\"terminated\\\", terminating!\" PID=29592\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=info msg=\"Invoking shutdown handler \\\"libpod\\\"\" PID=29592\nJul 12 12:37:30 managed-node2 systemd[1]: libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has successfully entered the 'dead' state.\nJul 12 12:37:30 managed-node2 systemd[1]: Stopped libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope.\n-- Subject: Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished shutting down.\nJul 12 12:37:30 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice.\n-- Subject: Unit machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice has finished shutting down.\nJul 12 12:37:30 managed-node2 systemd[1]: machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice: Consumed 212ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice completed and consumed the indicated resources.\nJul 12 12:37:30 managed-node2 podman[29576]: Pods stopped:\nJul 12 12:37:30 managed-node2 podman[29576]: a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd\nJul 12 12:37:30 managed-node2 podman[29576]: Pods removed:\nJul 12 12:37:30 managed-node2 podman[29576]: a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd\nJul 12 12:37:30 managed-node2 podman[29576]: Secrets removed:\nJul 12 12:37:30 managed-node2 podman[29576]: Volumes removed:\nJul 12 12:37:30 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice.\n-- Subject: Unit machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:30 managed-node2 systemd[1]: Started libcontainer container af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.\n-- Subject: Unit libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope has finished starting 
up.\n-- \n-- The start-up result is done.\nJul 12 12:37:30 managed-node2 systemd-udevd[29733]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.\nJul 12 12:37:30 managed-node2 systemd-udevd[29733]: Could not generate persistent MAC address for vethec9deee2: No such file or directory\nJul 12 12:37:30 managed-node2 NetworkManager[666]: [1752338250.7637] manager: (vethec9deee2): new Veth device (/org/freedesktop/NetworkManager/Devices/5)\nJul 12 12:37:30 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vethec9deee2: link is not ready\nJul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered blocking state\nJul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered disabled state\nJul 12 12:37:30 managed-node2 kernel: device vethec9deee2 entered promiscuous mode\nJul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered blocking state\nJul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered forwarding state\nJul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered disabled state\nJul 12 12:37:30 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethec9deee2: link becomes ready\nJul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered blocking state\nJul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered forwarding state\nJul 12 12:37:30 managed-node2 NetworkManager[666]: [1752338250.7897] device (vethec9deee2): carrier: link connected\nJul 12 12:37:30 managed-node2 NetworkManager[666]: [1752338250.7911] device (cni-podman1): carrier: link connected\nJul 12 12:37:30 managed-node2 dnsmasq[29803]: listening on cni-podman1(#3): 10.89.0.1\nJul 12 12:37:30 managed-node2 dnsmasq[29803]: listening on cni-podman1(#3): fe80::7c63:2ff:feee:ed5c%cni-podman1\nJul 12 12:37:30 managed-node2 dnsmasq[29808]: started, version 2.79 cachesize 150\nJul 12 12:37:30 managed-node2 dnsmasq[29808]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify\nJul 12 12:37:30 managed-node2 dnsmasq[29808]: using local addresses only for domain dns.podman\nJul 12 12:37:30 managed-node2 dnsmasq[29808]: reading /etc/resolv.conf\nJul 12 12:37:30 managed-node2 dnsmasq[29808]: using local addresses only for domain dns.podman\nJul 12 12:37:30 managed-node2 dnsmasq[29808]: using nameserver 10.29.169.13#53\nJul 12 12:37:30 managed-node2 dnsmasq[29808]: using nameserver 10.29.170.12#53\nJul 12 12:37:30 managed-node2 dnsmasq[29808]: using nameserver 10.2.32.1#53\nJul 12 12:37:30 managed-node2 dnsmasq[29808]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 12 12:37:30 managed-node2 systemd[1]: Started libcontainer container 39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.\n-- Subject: Unit libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:31 managed-node2 systemd[1]: Started libcontainer container fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.\n-- Subject: Unit libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope has finished start-up\n-- Defined-By: systemd\n-- Support: 
https://access.redhat.com/support\n-- \n-- Unit libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:31 managed-node2 podman[29576]: Pod:\nJul 12 12:37:31 managed-node2 podman[29576]: ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5\nJul 12 12:37:31 managed-node2 podman[29576]: Container:\nJul 12 12:37:31 managed-node2 podman[29576]: fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149\nJul 12 12:37:31 managed-node2 systemd[1]: Started A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:31 managed-node2 platform-python[29974]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:37:33 managed-node2 platform-python[30107]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:34 managed-node2 platform-python[30231]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:37:34 managed-node2 platform-python[30354]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:37:36 managed-node2 platform-python[30643]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:37:36 managed-node2 platform-python[30766]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:37:37 managed-node2 platform-python[30889]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True 
get_attributes=True\nJul 12 12:37:37 managed-node2 platform-python[30988]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752338256.766105-16794-202828239900759/source _original_basename=tmpvj89f27p follow=False checksum=92197531821af6a866eb3c8d736aa33d00262127 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None\nJul 12 12:37:37 managed-node2 platform-python[31113]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:37:37 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice.\n-- Subject: Unit machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:38 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 12 12:37:38 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vetha724e550: link is not ready\nJul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered blocking state\nJul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered disabled state\nJul 12 12:37:38 managed-node2 kernel: device vetha724e550 entered promiscuous mode\nJul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered blocking state\nJul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered forwarding state\nJul 12 12:37:38 managed-node2 NetworkManager[666]: [1752338258.0378] manager: (vetha724e550): new Veth device (/org/freedesktop/NetworkManager/Devices/6)\nJul 12 12:37:38 managed-node2 systemd-udevd[31161]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.\nJul 12 12:37:38 managed-node2 systemd-udevd[31161]: Could not generate persistent MAC address for vetha724e550: No such file or directory\nJul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered disabled state\nJul 12 12:37:38 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready\nJul 12 12:37:38 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready\nJul 12 12:37:38 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vetha724e550: link becomes ready\nJul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered blocking state\nJul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered 
forwarding state\nJul 12 12:37:38 managed-node2 NetworkManager[666]: [1752338258.0795] device (vetha724e550): carrier: link connected\nJul 12 12:37:38 managed-node2 dnsmasq[29808]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 2 addresses\nJul 12 12:37:38 managed-node2 systemd[1]: Started libpod-conmon-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope.\n-- Subject: Unit libpod-conmon-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:38 managed-node2 systemd[1]: Started libcontainer container 8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.\n-- Subject: Unit libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:38 managed-node2 systemd[1]: Started libpod-conmon-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope.\n-- Subject: Unit libpod-conmon-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:38 managed-node2 systemd[1]: Started libcontainer container 239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.\n-- Subject: Unit libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:38 managed-node2 platform-python[31394]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None\nJul 12 12:37:38 managed-node2 systemd[1]: Reloading.\nJul 12 12:37:39 managed-node2 platform-python[31555]: ansible-systemd Invoked with name= scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None\nJul 12 12:37:39 managed-node2 systemd[1]: Reloading.\nJul 12 12:37:40 managed-node2 platform-python[31710]: ansible-systemd Invoked with name= scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None\nJul 12 12:37:40 managed-node2 systemd[1]: Starting A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun starting up.\nJul 12 12:37:40 managed-node2 systemd[1]: libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope: Succeeded.\n-- Subject: Unit succeeded\n-- 
Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has successfully entered the 'dead' state.\nJul 12 12:37:40 managed-node2 systemd[1]: libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope: Consumed 31ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope completed and consumed the indicated resources.\nJul 12 12:37:40 managed-node2 systemd[1]: libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has successfully entered the 'dead' state.\nJul 12 12:37:40 managed-node2 systemd[1]: libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope: Consumed 32ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope completed and consumed the indicated resources.\nJul 12 12:37:40 managed-node2 dnsmasq[29808]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 12 12:37:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay-719154c260667d3aa74578747f416c045e6c4537dd0a7c671adf4544cf226e68-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-719154c260667d3aa74578747f416c045e6c4537dd0a7c671adf4544cf226e68-merged.mount has successfully entered the 'dead' state.\nJul 12 12:37:40 managed-node2 systemd[1]: libpod-conmon-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-conmon-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has successfully entered the 'dead' state.\nJul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered disabled state\nJul 12 12:37:40 managed-node2 kernel: device vetha724e550 left promiscuous mode\nJul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered disabled state\nJul 12 12:37:40 managed-node2 systemd[1]: run-netns-netns\\x2d1bb9153f\\x2df22a\\x2dcc5d\\x2d3c7a\\x2dd87e5ee733ce.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-netns-netns\\x2d1bb9153f\\x2df22a\\x2dcc5d\\x2d3c7a\\x2dd87e5ee733ce.mount has successfully entered the 'dead' state.\nJul 12 12:37:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136-userdata-shm.mount has successfully entered the 'dead' state.\nJul 12 12:37:40 managed-node2 systemd[1]: 
var-lib-containers-storage-overlay-eb2787269c2e2cd7be423803b1667df0aa39556214229872d965cd9cab309419-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-eb2787269c2e2cd7be423803b1667df0aa39556214229872d965cd9cab309419-merged.mount has successfully entered the 'dead' state.\nJul 12 12:37:40 managed-node2 systemd[1]: libpod-conmon-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-conmon-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has successfully entered the 'dead' state.\nJul 12 12:37:40 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice.\n-- Subject: Unit machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice has finished shutting down.\nJul 12 12:37:40 managed-node2 systemd[1]: machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice: Consumed 199ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice completed and consumed the indicated resources.\nJul 12 12:37:40 managed-node2 podman[31717]: Pods stopped:\nJul 12 12:37:40 managed-node2 podman[31717]: 537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583\nJul 12 12:37:40 managed-node2 podman[31717]: Pods removed:\nJul 12 12:37:40 managed-node2 podman[31717]: 537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583\nJul 12 12:37:40 managed-node2 podman[31717]: Secrets removed:\nJul 12 12:37:40 managed-node2 podman[31717]: Volumes removed:\nJul 12 12:37:40 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice.\n-- Subject: Unit machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:40 managed-node2 systemd[1]: Started libcontainer container 7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.\n-- Subject: Unit libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:40 managed-node2 NetworkManager[666]: [1752338260.9491] manager: (veth3fe74d71): new Veth device (/org/freedesktop/NetworkManager/Devices/7)\nJul 12 12:37:40 managed-node2 systemd-udevd[31882]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.\nJul 12 12:37:40 managed-node2 systemd-udevd[31882]: Could not generate 
persistent MAC address for veth3fe74d71: No such file or directory\nJul 12 12:37:40 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth3fe74d71: link is not ready\nJul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered blocking state\nJul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered disabled state\nJul 12 12:37:40 managed-node2 kernel: device veth3fe74d71 entered promiscuous mode\nJul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered blocking state\nJul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered forwarding state\nJul 12 12:37:40 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth3fe74d71: link becomes ready\nJul 12 12:37:40 managed-node2 NetworkManager[666]: [1752338260.9931] device (veth3fe74d71): carrier: link connected\nJul 12 12:37:41 managed-node2 dnsmasq[29808]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 2 addresses\nJul 12 12:37:41 managed-node2 systemd[1]: Started libcontainer container 304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.\n-- Subject: Unit libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:41 managed-node2 systemd[1]: Started libcontainer container e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.\n-- Subject: Unit libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:41 managed-node2 systemd[1]: Started A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:41 managed-node2 podman[31717]: Pod:\nJul 12 12:37:41 managed-node2 podman[31717]: ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2\nJul 12 12:37:41 managed-node2 podman[31717]: Container:\nJul 12 12:37:41 managed-node2 podman[31717]: e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e\nJul 12 12:37:41 managed-node2 sudo[32116]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jyrknhzkjwtoyoqfhtaoymdanzpphasy ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338261.7921255-17011-231273247445257/AnsiballZ_command.py'\nJul 12 12:37:41 managed-node2 sudo[32116]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:37:42 managed-node2 platform-python[32119]: ansible-command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:42 managed-node2 systemd[25539]: Started podman-32128.scope.\n-- Subject: Unit UNIT has finished 
start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:42 managed-node2 sudo[32116]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:37:42 managed-node2 platform-python[32258]: ansible-command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:42 managed-node2 platform-python[32389]: ansible-command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:43 managed-node2 sudo[32528]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lhqlhpwddcodyczhbsyjvspptskrqirm ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338263.154581-17086-94452808741655/AnsiballZ_command.py'\nJul 12 12:37:43 managed-node2 sudo[32528]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:37:43 managed-node2 platform-python[32531]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:43 managed-node2 sudo[32528]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:37:43 managed-node2 platform-python[32657]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:44 managed-node2 platform-python[32783]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:44 managed-node2 platform-python[32909]: ansible-uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:37:45 managed-node2 platform-python[33033]: ansible-uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None 
directory_mode=None\nJul 12 12:37:45 managed-node2 rsyslogd[1025]: imjournal: journal files changed, reloading... [v8.2102.0-15.el8 try https://www.rsyslog.com/e/0 ]\nJul 12 12:37:45 managed-node2 platform-python[33158]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr__qviri30_podman/httpd1-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:45 managed-node2 platform-python[33282]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr__qviri30_podman/httpd2-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:46 managed-node2 platform-python[33406]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr__qviri30_podman/httpd3-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:49 managed-node2 platform-python[33655]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:50 managed-node2 platform-python[33784]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:37:53 managed-node2 platform-python[33909]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:37:56 managed-node2 platform-python[34032]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 12 12:37:56 managed-node2 platform-python[34159]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 12 12:37:57 managed-node2 platform-python[34286]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 12 12:37:59 managed-node2 platform-python[34409]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 
conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:38:02 managed-node2 platform-python[34532]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:38:05 managed-node2 platform-python[34655]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:38:08 managed-node2 platform-python[34778]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 12 12:38:10 managed-node2 platform-python[34939]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 12 12:38:10 managed-node2 platform-python[35062]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 12 12:38:15 managed-node2 platform-python[35185]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 12 12:38:15 managed-node2 platform-python[35309]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:16 managed-node2 platform-python[35434]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:16 managed-node2 platform-python[35558]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:17 managed-node2 platform-python[35682]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:18 managed-node2 platform-python[35806]: ansible-command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None\nJul 12 12:38:19 managed-node2 platform-python[35929]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None 
access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:19 managed-node2 platform-python[36052]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:19 managed-node2 sudo[36175]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sfjnrnyknupgcycrjkhhnhuswecfqpyf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338299.776674-18742-47644857358508/AnsiballZ_podman_image.py'\nJul 12 12:38:19 managed-node2 sudo[36175]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36180.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36189.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36197.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36205.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36213.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36222.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:38:20 managed-node2 sudo[36175]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:38:21 managed-node2 platform-python[36351]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:21 managed-node2 platform-python[36476]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S 
unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:22 managed-node2 platform-python[36599]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 12 12:38:22 managed-node2 platform-python[36663]: ansible-file Invoked with owner=podman_basic_user group=3001 mode=0644 dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _original_basename=tmpxhmslwri recurse=False state=file path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:22 managed-node2 sudo[36786]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kddzobyvwijhudrubugwpxpljmgfafhb ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338302.4767652-18857-261073031296101/AnsiballZ_podman_play.py'\nJul 12 12:38:22 managed-node2 sudo[36786]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:38:22 managed-node2 platform-python[36789]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:38:22 managed-node2 systemd[25539]: Started podman-36797.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:38:22 managed-node2 platform-python[36789]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 12 12:38:22 managed-node2 platform-python[36789]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: \nJul 12 12:38:22 managed-node2 platform-python[36789]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-12T12:38:22-04:00\" level=info msg=\"/bin/podman filtering at log level debug\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-12T12:38:22-04:00\" level=info msg=\"Using sqlite as database backend\"\n 
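[Editor's note] The PODMAN-PLAY-KUBE stderr dump that continues below comes from a rootless replay of httpd1.yml: the module runs /bin/podman play kube as podman_basic_user (via sudo with XDG_RUNTIME_DIR=/run/user/3001, as the sudo line above shows), so podman resolves per-user storage -- graph root under /home/podman_basic_user/.local/share/containers/storage and run root under /run/user/3001/containers -- rather than the system-wide /var/lib/containers/storage used by the root-scoped runs earlier in this log. A minimal sketch of reproducing that invocation by hand, assuming the same user and paths captured here (illustrative only, not part of the recorded run):

    sudo -u podman_basic_user XDG_RUNTIME_DIR=/run/user/3001 \
        podman play kube --start=true --log-level=debug \
        /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml

As the stderr shows further down, this replay exits with rc 125 ("name \"httpd1\" is in use: pod already exists"), which is how podman reports a replay while the pod from the first deployment still holds that name.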
time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Initializing event backend file\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\n time=\"2025-07-12T12:38:22-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-12 12:37:16.97600692 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers 
storage\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\"\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566)\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\"\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Got pod cgroup as /libpod_parent/36ca61264e7e11a7ce277e40b51ec55a9afdcde0d1c0d8549c5c14e962eb5314\"\n Error: adding pod to state: name \"httpd1\" is in use: pod already exists\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Shutting down engines\"\nJul 12 12:38:22 managed-node2 platform-python[36789]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125\nJul 12 12:38:22 managed-node2 sudo[36786]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:38:23 managed-node2 platform-python[36952]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 12 12:38:24 managed-node2 platform-python[37076]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:25 managed-node2 platform-python[37201]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:26 managed-node2 platform-python[37325]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:27 managed-node2 platform-python[37448]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None 
directory_mode=None\nJul 12 12:38:28 managed-node2 platform-python[37737]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:28 managed-node2 platform-python[37862]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:29 managed-node2 platform-python[37985]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 12 12:38:29 managed-node2 platform-python[38049]: ansible-file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd2.yml _original_basename=tmpl5_fx80_ recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd2.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:29 managed-node2 platform-python[38172]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:38:29 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice.\n-- Subject: Unit machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:38:29 managed-node2 platform-python[38172]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 12 12:38:29 managed-node2 platform-python[38172]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: \nJul 12 12:38:29 managed-node2 platform-python[38172]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-12T12:38:29-04:00\" level=info msg=\"/usr/bin/podman filtering at log level debug\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)\"\n 
time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-12T12:38:29-04:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\n time=\"2025-07-12T12:38:29-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Initializing event backend file\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\n time=\"2025-07-12T12:38:29-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-12 12:34:58.774465298 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false 
false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\\\"\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2)\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\\\"\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice for parent machine.slice and name libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice\"\n Error: adding pod to state: name \"httpd2\" is in use: pod already exists\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Shutting down engines\"\nJul 12 12:38:29 managed-node2 platform-python[38172]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125\nJul 12 12:38:31 managed-node2 platform-python[38333]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:32 managed-node2 platform-python[38458]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:33 managed-node2 platform-python[38582]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:33 
managed-node2 platform-python[38705]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:35 managed-node2 platform-python[38995]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:35 managed-node2 platform-python[39120]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:35 managed-node2 platform-python[39243]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 12 12:38:36 managed-node2 platform-python[39307]: ansible-file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd3.yml _original_basename=tmpb1ttu3ws recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd3.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:36 managed-node2 platform-python[39430]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:38:36 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_76dbf1d1588fd5e46783829b7ad9b35366a8e6eaea8987a62e2fa617ab2d64d4.slice.\n-- Subject: Unit machine-libpod_pod_76dbf1d1588fd5e46783829b7ad9b35366a8e6eaea8987a62e2fa617ab2d64d4.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_76dbf1d1588fd5e46783829b7ad9b35366a8e6eaea8987a62e2fa617ab2d64d4.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:38:37 managed-node2 sudo[39591]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-reodnpkicydeipvtrpezylgtxbcjdhgz ; /usr/libexec/platform-python 
/var/tmp/ansible-tmp-1752338317.3985052-19629-279558676694792/AnsiballZ_command.py'\nJul 12 12:38:37 managed-node2 sudo[39591]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:38:37 managed-node2 platform-python[39594]: ansible-command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:37 managed-node2 systemd[25539]: Started podman-39603.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:38:37 managed-node2 sudo[39591]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:38:38 managed-node2 platform-python[39733]: ansible-command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:38 managed-node2 platform-python[39864]: ansible-command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:38 managed-node2 sudo[39995]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jdxtrtiiowdglcaeyhyrkpgebggwzera ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338318.6450086-19659-70124315420202/AnsiballZ_command.py'\nJul 12 12:38:38 managed-node2 sudo[39995]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:38:38 managed-node2 platform-python[39998]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:38 managed-node2 sudo[39995]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:38:39 managed-node2 platform-python[40124]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:39 managed-node2 platform-python[40250]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:40 managed-node2 platform-python[40376]: ansible-uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:40 managed-node2 platform-python[40500]: ansible-uri Invoked with 
url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:40 managed-node2 platform-python[40624]: ansible-uri Invoked with url=http://localhost:15003/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:43 managed-node2 platform-python[40873]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:44 managed-node2 platform-python[41002]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:47 managed-node2 platform-python[41127]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 12 12:38:48 managed-node2 platform-python[41251]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:48 managed-node2 platform-python[41376]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:49 managed-node2 platform-python[41500]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:50 managed-node2 platform-python[41624]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:50 managed-node2 platform-python[41748]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:51 managed-node2 sudo[41873]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-phdckzktiusimljvxxeqcswlbkptcgje ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338330.9997387-20292-117543446474536/AnsiballZ_systemd.py'\nJul 12 12:38:51 managed-node2 sudo[41873]: 
pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:38:51 managed-node2 platform-python[41876]: ansible-systemd Invoked with name= scope=user state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 12 12:38:51 managed-node2 systemd[25539]: Reloading.\nJul 12 12:38:51 managed-node2 systemd[25539]: Stopping A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit UNIT has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has begun shutting down.\nJul 12 12:38:51 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered disabled state\nJul 12 12:38:51 managed-node2 kernel: device veth73ffc199 left promiscuous mode\nJul 12 12:38:51 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered disabled state\nJul 12 12:38:51 managed-node2 podman[41892]: Pods stopped:\nJul 12 12:38:51 managed-node2 podman[41892]: e6000b000d16f866b55a1a19dd3fb3262c82c732a816136db57b7e8368f0a32a\nJul 12 12:38:51 managed-node2 podman[41892]: Pods removed:\nJul 12 12:38:51 managed-node2 podman[41892]: e6000b000d16f866b55a1a19dd3fb3262c82c732a816136db57b7e8368f0a32a\nJul 12 12:38:51 managed-node2 podman[41892]: Secrets removed:\nJul 12 12:38:51 managed-node2 podman[41892]: Volumes removed:\nJul 12 12:38:51 managed-node2 systemd[25539]: Stopped A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:38:51 managed-node2 sudo[41873]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:38:52 managed-node2 platform-python[42165]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:52 managed-node2 sudo[42290]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nazeochktfswzfvlptenlckqnldzbmyv ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338332.4280062-20367-151061681885350/AnsiballZ_podman_play.py'\nJul 12 12:38:52 managed-node2 sudo[42290]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 12 12:38:52 managed-node2 systemd[25539]: Started podman-42301.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is 
done.\nJul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman kube play --down /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped:\n Pods removed:\n Secrets removed:\n Volumes removed:\nJul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: \nJul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 12 12:38:52 managed-node2 sudo[42290]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:38:53 managed-node2 platform-python[42430]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:54 managed-node2 platform-python[42553]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 12 12:38:54 managed-node2 platform-python[42677]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:56 managed-node2 platform-python[42802]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:56 managed-node2 platform-python[42926]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 12 12:38:56 managed-node2 systemd[1]: Reloading.\nJul 12 12:38:57 managed-node2 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun shutting down.\nJul 12 12:38:57 managed-node2 systemd[1]: libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope has successfully entered the 'dead' state.\nJul 12 12:38:57 managed-node2 systemd[1]: libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope: Consumed 32ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope completed and consumed the indicated resources.\nJul 12 12:38:57 managed-node2 systemd[1]: libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope: Succeeded.\n-- 
Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope has successfully entered the 'dead' state.\nJul 12 12:38:57 managed-node2 systemd[1]: libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope: Consumed 34ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope completed and consumed the indicated resources.\nJul 12 12:38:57 managed-node2 dnsmasq[29808]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 12 12:38:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay-ad05b883b876cb925ec05b9fafaf9a8a37fd48a25d5d54b9615f3f4cdf0bd3b3-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-ad05b883b876cb925ec05b9fafaf9a8a37fd48a25d5d54b9615f3f4cdf0bd3b3-merged.mount has successfully entered the 'dead' state.\nJul 12 12:38:57 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered disabled state\nJul 12 12:38:57 managed-node2 kernel: device vethec9deee2 left promiscuous mode\nJul 12 12:38:57 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered disabled state\nJul 12 12:38:57 managed-node2 systemd[1]: run-netns-netns\\x2d52414ca9\\x2df342\\x2dd1f3\\x2d8cce\\x2d232fb04744c1.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-netns-netns\\x2d52414ca9\\x2df342\\x2dd1f3\\x2d8cce\\x2d232fb04744c1.mount has successfully entered the 'dead' state.\nJul 12 12:38:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a-userdata-shm.mount has successfully entered the 'dead' state.\nJul 12 12:38:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay-2d63d07bf8161ced4731534605fa38c1618204d50fc3a412c2eb303e296f3b5e-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-2d63d07bf8161ced4731534605fa38c1618204d50fc3a412c2eb303e296f3b5e-merged.mount has successfully entered the 'dead' state.\nJul 12 12:38:57 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice.\n-- Subject: Unit machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice has finished shutting down.\nJul 12 12:38:57 managed-node2 systemd[1]: machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice: Consumed 67ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: 
https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice completed and consumed the indicated resources.\nJul 12 12:38:57 managed-node2 podman[42962]: Pods stopped:\nJul 12 12:38:57 managed-node2 podman[42962]: ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5\nJul 12 12:38:57 managed-node2 podman[42962]: Pods removed:\nJul 12 12:38:57 managed-node2 podman[42962]: ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5\nJul 12 12:38:57 managed-node2 podman[42962]: Secrets removed:\nJul 12 12:38:57 managed-node2 podman[42962]: Volumes removed:\nJul 12 12:38:57 managed-node2 systemd[1]: libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope has successfully entered the 'dead' state.\nJul 12 12:38:57 managed-node2 systemd[1]: libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope: Consumed 34ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope completed and consumed the indicated resources.\nJul 12 12:38:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315-userdata-shm.mount has successfully entered the 'dead' state.\nJul 12 12:38:57 managed-node2 dnsmasq[29808]: exiting on receipt of SIGTERM\nJul 12 12:38:57 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has successfully entered the 'dead' state.\nJul 12 12:38:57 managed-node2 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished shutting down.\nJul 12 12:38:58 managed-node2 platform-python[43238]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay-495aa6291e9f835076198c3e1c7b8cf1909ca8b5400bdf0e5a851ba0c44119c1-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-495aa6291e9f835076198c3e1c7b8cf1909ca8b5400bdf0e5a851ba0c44119c1-merged.mount has successfully entered the 'dead' state.\nJul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play Invoked with 
state=absent debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman kube play --down /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped:\n Pods removed:\n Secrets removed:\n Volumes removed:\nJul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: \nJul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 12 12:38:58 managed-node2 platform-python[43499]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:39:00 managed-node2 platform-python[43622]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:01 managed-node2 platform-python[43747]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:02 managed-node2 platform-python[43871]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 12 12:39:02 managed-node2 systemd[1]: Reloading.\nJul 12 12:39:02 managed-node2 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun shutting down.\nJul 12 12:39:02 managed-node2 systemd[1]: libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope has successfully entered the 'dead' state.\nJul 12 12:39:02 managed-node2 systemd[1]: libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope: Consumed 33ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: 
systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope completed and consumed the indicated resources.\nJul 12 12:39:02 managed-node2 systemd[1]: libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope has successfully entered the 'dead' state.\nJul 12 12:39:02 managed-node2 systemd[1]: libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope: Consumed 33ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope completed and consumed the indicated resources.\nJul 12 12:39:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay-9f3109ed9592a16625c27d2daaac765746798fb973c8fcb3160951dbc3c83474-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-9f3109ed9592a16625c27d2daaac765746798fb973c8fcb3160951dbc3c83474-merged.mount has successfully entered the 'dead' state.\nJul 12 12:39:02 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered disabled state\nJul 12 12:39:02 managed-node2 kernel: device veth3fe74d71 left promiscuous mode\nJul 12 12:39:02 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered disabled state\nJul 12 12:39:02 managed-node2 systemd[1]: run-netns-netns\\x2dda1f9efe\\x2d2607\\x2d2465\\x2d3389\\x2d63a80a061169.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-netns-netns\\x2dda1f9efe\\x2d2607\\x2d2465\\x2d3389\\x2d63a80a061169.mount has successfully entered the 'dead' state.\nJul 12 12:39:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70-userdata-shm.mount has successfully entered the 'dead' state.\nJul 12 12:39:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay-b047a8f535e44a79e89943c24ecd0f40472ad6c74487b61c695a5612de0f66e9-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-b047a8f535e44a79e89943c24ecd0f40472ad6c74487b61c695a5612de0f66e9-merged.mount has successfully entered the 'dead' state.\nJul 12 12:39:02 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice.\n-- Subject: Unit machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice has finished shutting down.\nJul 12 12:39:02 managed-node2 systemd[1]: 
machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice: Consumed 66ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice completed and consumed the indicated resources.\nJul 12 12:39:02 managed-node2 podman[43907]: Pods stopped:\nJul 12 12:39:02 managed-node2 podman[43907]: ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2\nJul 12 12:39:02 managed-node2 podman[43907]: Pods removed:\nJul 12 12:39:02 managed-node2 podman[43907]: ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2\nJul 12 12:39:02 managed-node2 podman[43907]: Secrets removed:\nJul 12 12:39:02 managed-node2 podman[43907]: Volumes removed:\nJul 12 12:39:02 managed-node2 systemd[1]: libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope has successfully entered the 'dead' state.\nJul 12 12:39:02 managed-node2 systemd[1]: libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope: Consumed 36ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope completed and consumed the indicated resources.\nJul 12 12:39:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b-userdata-shm.mount has successfully entered the 'dead' state.\nJul 12 12:39:02 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has successfully entered the 'dead' state.\nJul 12 12:39:02 managed-node2 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished shutting down.\nJul 12 12:39:03 managed-node2 platform-python[44179]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay-a2ca6c0802e64aa881912046899069f9906c3a3ecb7fd7f0e60445f767b453fb-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-a2ca6c0802e64aa881912046899069f9906c3a3ecb7fd7f0e60445f767b453fb-merged.mount has successfully entered the 
'dead' state.\nJul 12 12:39:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:39:03 managed-node2 platform-python[44304]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:39:03 managed-node2 platform-python[44304]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/httpd3.yml\nJul 12 12:39:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:39:04 managed-node2 platform-python[44440]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:39:05 managed-node2 platform-python[44563]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None\nJul 12 12:39:05 managed-node2 platform-python[44687]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:05 managed-node2 sudo[44812]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-efsiwiyrgguftoqfmdsvrczsjrcdxihg ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338345.768981-21015-270850189165831/AnsiballZ_podman_container_info.py'\nJul 12 12:39:05 managed-node2 sudo[44812]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:39:06 managed-node2 platform-python[44815]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None\nJul 12 12:39:06 managed-node2 systemd[25539]: Started podman-44817.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:39:06 managed-node2 sudo[44812]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:39:06 managed-node2 sudo[44946]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lgxpwgdjqpsoqirugaueifldgtghyuxf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python 
/var/tmp/ansible-tmp-1752338346.3029222-21038-175701710527734/AnsiballZ_command.py'\nJul 12 12:39:06 managed-node2 sudo[44946]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:39:06 managed-node2 platform-python[44949]: ansible-command Invoked with _raw_params=podman network ls -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:06 managed-node2 systemd[25539]: Started podman-44951.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:39:06 managed-node2 sudo[44946]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:39:06 managed-node2 sudo[45105]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-plsfjelikobxnwisunpzotpprpzjinoh ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338346.7950253-21068-8970032815672/AnsiballZ_command.py'\nJul 12 12:39:06 managed-node2 sudo[45105]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:39:07 managed-node2 platform-python[45108]: ansible-command Invoked with _raw_params=podman secret ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:07 managed-node2 systemd[25539]: Started podman-45110.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:39:07 managed-node2 sudo[45105]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:39:07 managed-node2 platform-python[45239]: ansible-command Invoked with removes=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl disable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None\nJul 12 12:39:07 managed-node2 systemd[1]: Stopping User Manager for UID 3001...\n-- Subject: Unit user@3001.service has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user@3001.service has begun shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Stopping podman-pause-5a039c99.scope.\n-- Subject: Unit UNIT has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has begun shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Stopped target Default.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Removed slice podman\\x2dkube.slice.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Stopping D-Bus User Message Bus...\n-- Subject: Unit UNIT has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has begun shutting down.\nJul 12 12:39:07 managed-node2 
systemd[25539]: Stopped D-Bus User Message Bus.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Stopped target Basic System.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Stopped target Timers.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Stopped Mark boot as successful after the user session has run 2 minutes.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Stopped target Paths.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Stopped target Sockets.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Closed D-Bus User Message Bus Socket.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Stopped podman-pause-5a039c99.scope.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Removed slice user.slice.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Reached target Shutdown.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:39:07 managed-node2 systemd[25539]: Started Exit the Session.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:39:07 managed-node2 systemd[25539]: Reached target Exit the Session.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:39:07 managed-node2 systemd[25545]: pam_unix(systemd-user:session): session closed for user podman_basic_user\nJul 12 12:39:07 managed-node2 systemd[1]: user@3001.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit user@3001.service has successfully entered the 'dead' state.\nJul 12 12:39:07 managed-node2 systemd[1]: Stopped User Manager for UID 3001.\n-- Subject: Unit user@3001.service has finished shutting down\n-- 
Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user@3001.service has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[1]: Stopping User runtime directory /run/user/3001...\n-- Subject: Unit user-runtime-dir@3001.service has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-runtime-dir@3001.service has begun shutting down.\nJul 12 12:39:07 managed-node2 systemd[1]: run-user-3001.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-user-3001.mount has successfully entered the 'dead' state.\nJul 12 12:39:07 managed-node2 systemd[1]: user-runtime-dir@3001.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit user-runtime-dir@3001.service has successfully entered the 'dead' state.\nJul 12 12:39:07 managed-node2 systemd[1]: Stopped User runtime directory /run/user/3001.\n-- Subject: Unit user-runtime-dir@3001.service has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-runtime-dir@3001.service has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[1]: Removed slice User Slice of UID 3001.\n-- Subject: Unit user-3001.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-3001.slice has finished shutting down.\nJul 12 12:39:07 managed-node2 platform-python[45371]: ansible-command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:08 managed-node2 sudo[45495]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gazcsbdiijzpmpohefmybwwgcnpxuufr ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338348.4017277-21172-132611654922840/AnsiballZ_command.py'\nJul 12 12:39:08 managed-node2 sudo[45495]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:39:08 managed-node2 platform-python[45498]: ansible-command Invoked with _raw_params=podman pod exists httpd1 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:08 managed-node2 sudo[45495]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:39:09 managed-node2 platform-python[45628]: ansible-command Invoked with _raw_params=podman pod exists httpd2 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:09 managed-node2 platform-python[45758]: ansible-command Invoked with _raw_params=podman pod exists httpd3 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:09 managed-node2 sudo[45888]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pizwdchaqbkhharmotzkhmtxjzrasqsn ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338349.5796022-21223-271577239366846/AnsiballZ_command.py'\nJul 12 12:39:09 managed-node2 sudo[45888]: pam_unix(sudo:session): 
session opened for user podman_basic_user by root(uid=0)\nJul 12 12:39:09 managed-node2 platform-python[45891]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:09 managed-node2 sudo[45888]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:39:10 managed-node2 platform-python[46017]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:10 managed-node2 platform-python[46143]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:10 managed-node2 platform-python[46269]: ansible-stat Invoked with path=/var/lib/systemd/linger/podman_basic_user follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:13 managed-node2 platform-python[46517]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:14 managed-node2 platform-python[46646]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 12 12:39:15 managed-node2 platform-python[46770]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:17 managed-node2 platform-python[46895]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 12 12:39:18 managed-node2 platform-python[47019]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:18 managed-node2 platform-python[47144]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:19 managed-node2 platform-python[47268]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:20 managed-node2 platform-python[47392]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:20 managed-node2 platform-python[47516]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:21 managed-node2 platform-python[47639]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:21 managed-node2 platform-python[47762]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False 
force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:39:23 managed-node2 platform-python[47885]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 12 12:39:23 managed-node2 platform-python[48009]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:24 managed-node2 platform-python[48134]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:25 managed-node2 platform-python[48258]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 12 12:39:26 managed-node2 platform-python[48385]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:26 managed-node2 platform-python[48508]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:39:27 managed-node2 platform-python[48631]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:29 managed-node2 platform-python[48756]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:29 managed-node2 platform-python[48880]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 12 12:39:30 managed-node2 platform-python[49007]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:30 managed-node2 platform-python[49130]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:39:31 managed-node2 
platform-python[49253]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None\nJul 12 12:39:32 managed-node2 platform-python[49377]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:32 managed-node2 platform-python[49500]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:39:33 managed-node2 platform-python[49623]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:39:36 managed-node2 platform-python[49785]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d\nJul 12 12:39:36 managed-node2 platform-python[49912]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:36 managed-node2 platform-python[50035]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:39 managed-node2 platform-python[50283]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:40 managed-node2 platform-python[50412]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 12 12:39:41 managed-node2 platform-python[50536]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:43 managed-node2 platform-python[50700]: ansible-setup Invoked with gather_subset=['all'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d\nJul 12 12:39:46 managed-node2 platform-python[50852]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:47 managed-node2 platform-python[50975]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:49 managed-node2 platform-python[51223]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None 
stdin=None\nJul 12 12:39:50 managed-node2 platform-python[51352]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 12 12:39:50 managed-node2 platform-python[51476]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:56 managed-node2 platform-python[51640]: ansible-setup Invoked with gather_subset=['all'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d\nJul 12 12:39:57 managed-node2 platform-python[51792]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:57 managed-node2 platform-python[51915]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:58 managed-node2 platform-python[52039]: ansible-dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:40:02 managed-node2 platform-python[52167]: ansible-dnf Invoked with name=['certmonger'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:40:04 managed-node2 dbus-daemon[601]: [system] Reloaded configuration\nJul 12 12:40:04 managed-node2 dbus-daemon[601]: [system] Reloaded configuration\nJul 12 12:40:04 managed-node2 dbus-daemon[601]: [system] Reloaded configuration\nJul 12 12:40:04 managed-node2 dbus-daemon[601]: [system] Reloaded configuration\nJul 12 12:40:04 managed-node2 dbus-daemon[601]: [system] Reloaded configuration\nJul 12 12:40:04 managed-node2 systemd[1]: Reloading.\nJul 12 12:40:05 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.\n-- Subject: Unit run-rbd3345bfad0b449fb2e69833e5ca39b9.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit run-rbd3345bfad0b449fb2e69833e5ca39b9.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:40:05 managed-node2 systemd[1]: Starting man-db-cache-update.service...\n-- Subject: Unit man-db-cache-update.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit man-db-cache-update.service has begun starting up.\nJul 12 12:40:05 managed-node2 systemd[1]: Reloading.\nJul 12 12:40:05 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit man-db-cache-update.service has successfully entered the 'dead' 
state.\nJul 12 12:40:05 managed-node2 systemd[1]: Started man-db-cache-update.service.\n-- Subject: Unit man-db-cache-update.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit man-db-cache-update.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:40:05 managed-node2 systemd[1]: run-rbd3345bfad0b449fb2e69833e5ca39b9.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-rbd3345bfad0b449fb2e69833e5ca39b9.service has successfully entered the 'dead' state.\nJul 12 12:40:06 managed-node2 platform-python[52799]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:40:06 managed-node2 platform-python[52922]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:40:07 managed-node2 platform-python[53045]: ansible-systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 12 12:40:07 managed-node2 systemd[1]: Reloading.\nJul 12 12:40:07 managed-node2 systemd[1]: Starting Certificate monitoring and PKI enrollment...\n-- Subject: Unit certmonger.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit certmonger.service has begun starting up.\nJul 12 12:40:07 managed-node2 systemd[1]: Started Certificate monitoring and PKI enrollment.\n-- Subject: Unit certmonger.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit certmonger.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:40:08 managed-node2 platform-python[53238]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=#\n # Ansible managed\n #\n # system_role:certificate\n booted=True provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None\nJul 12 12:40:08 managed-node2 certmonger[53081]: 2025-07-12 12:40:08 [53081] Wrote to /var/lib/certmonger/requests/20250712164008\nJul 12 12:40:08 managed-node2 
certmonger[53081]: 2025-07-12 12:40:08 [53081] Wrote to /var/lib/certmonger/requests/20250712164008\n[... identical certmonger "Wrote to /var/lib/certmonger/requests/20250712164008" messages from 12:40:08 omitted ...]\nJul 12 12:40:08 managed-node2 
certmonger[53254]: Certificate in file \"/etc/pki/tls/certs/quadlet_demo.crt\" issued by CA and saved.\nJul 12 12:40:08 managed-node2 certmonger[53081]: 2025-07-12 12:40:08 [53081] Wrote to /var/lib/certmonger/requests/20250712164008\nJul 12 12:40:08 managed-node2 platform-python[53376]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt\nJul 12 12:40:09 managed-node2 platform-python[53499]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key\nJul 12 12:40:09 managed-node2 platform-python[53622]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt\nJul 12 12:40:10 managed-node2 platform-python[53745]: ansible-command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:40:10 managed-node2 certmonger[53081]: 2025-07-12 12:40:10 [53081] Wrote to /var/lib/certmonger/requests/20250712164008\nJul 12 12:40:10 managed-node2 platform-python[53869]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:40:10 managed-node2 platform-python[53992]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:40:11 managed-node2 platform-python[54115]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:40:11 managed-node2 platform-python[54238]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:40:12 managed-node2 platform-python[54361]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:40:14 managed-node2 platform-python[54609]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:40:15 managed-node2 
platform-python[54738]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 12 12:40:16 managed-node2 platform-python[54862]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:40:17 managed-node2 platform-python[54987]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:40:18 managed-node2 platform-python[55110]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:40:18 managed-node2 platform-python[55233]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:40:19 managed-node2 platform-python[55357]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:40:22 managed-node2 platform-python[55480]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 12 12:40:22 managed-node2 platform-python[55607]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 12 12:40:23 managed-node2 platform-python[55734]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 12 12:40:24 managed-node2 platform-python[55857]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 12 12:40:26 managed-node2 platform-python[55980]: ansible-command Invoked with _raw_params=journalctl -ex warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None", "task_name": "Dump journal", "task_path": "/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:142" }, { 
"ansible_version": "2.9.27", "end_time": "2025-07-12T16:40:39.406291+00:00Z", "host": "managed-node2", "message": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "start_time": "2025-07-12T16:40:39.390295+00:00Z", "task_name": "Manage each secret", "task_path": "/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41" }, { "ansible_version": "2.9.27", "delta": "0:00:00.027095", "end_time": "2025-07-12 12:40:40.451013", "host": "managed-node2", "message": "No message could be found", "rc": 0, "start_time": "2025-07-12 12:40:40.423918", "stdout": "-- Logs begin at Sat 2025-07-12 12:29:00 EDT, end at Sat 2025-07-12 12:40:40 EDT. --\nJul 12 12:34:28 managed-node2 platform-python[12277]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:34:28 managed-node2 platform-python[12400]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:34:29 managed-node2 platform-python[12523]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:34:29 managed-node2 platform-python[12647]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:34:32 managed-node2 platform-python[12770]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 12 12:34:33 managed-node2 platform-python[12897]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 12 12:34:33 managed-node2 systemd[1]: Reloading.\nJul 12 12:34:33 managed-node2 systemd[1]: Starting firewalld - dynamic firewall daemon...\n-- Subject: Unit firewalld.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit firewalld.service has begun starting up.\nJul 12 12:34:34 managed-node2 systemd[1]: Started firewalld - dynamic firewall daemon.\n-- Subject: Unit firewalld.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit firewalld.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:34:34 managed-node2 firewalld[12934]: WARNING: AllowZoneDrifting is enabled. This is considered an insecure configuration option. It will be removed in a future release. 
Please consider disabling it now.\nJul 12 12:34:35 managed-node2 platform-python[13124]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 12 12:34:36 managed-node2 platform-python[13247]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:34:36 managed-node2 platform-python[13370]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:34:37 managed-node2 platform-python[13493]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:34:40 managed-node2 platform-python[13616]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:34:42 managed-node2 platform-python[13739]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:34:45 managed-node2 dbus-daemon[601]: [system] Reloaded configuration\nJul 12 12:34:45 managed-node2 dbus-daemon[601]: [system] Reloaded configuration\nJul 12 12:34:45 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.\n-- Subject: Unit run-rc4136976cbe94ee39dd82aa6d795790f.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit run-rc4136976cbe94ee39dd82aa6d795790f.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:34:45 managed-node2 systemd[1]: Starting man-db-cache-update.service...\n-- Subject: Unit man-db-cache-update.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit man-db-cache-update.service has begun starting up.\nJul 12 12:34:46 managed-node2 systemd[1]: 
man-db-cache-update.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit man-db-cache-update.service has successfully entered the 'dead' state.\nJul 12 12:34:46 managed-node2 systemd[1]: Started man-db-cache-update.service.\n-- Subject: Unit man-db-cache-update.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit man-db-cache-update.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:34:46 managed-node2 systemd[1]: run-rc4136976cbe94ee39dd82aa6d795790f.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-rc4136976cbe94ee39dd82aa6d795790f.service has successfully entered the 'dead' state.\nJul 12 12:34:46 managed-node2 platform-python[14345]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 12 12:34:47 managed-node2 platform-python[14493]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:34:48 managed-node2 platform-python[14617]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 12 12:34:49 managed-node2 kernel: SELinux: Converting 460 SID table entries...\nJul 12 12:34:49 managed-node2 kernel: SELinux: policy capability network_peer_controls=1\nJul 12 12:34:49 managed-node2 kernel: SELinux: policy capability open_perms=1\nJul 12 12:34:49 managed-node2 kernel: SELinux: policy capability extended_socket_class=1\nJul 12 12:34:49 managed-node2 kernel: SELinux: policy capability always_check_network=0\nJul 12 12:34:49 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1\nJul 12 12:34:49 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1\nJul 12 12:34:49 managed-node2 dbus-daemon[601]: [system] Reloaded configuration\nJul 12 12:34:50 managed-node2 platform-python[14744]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 12 12:34:55 managed-node2 platform-python[14867]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:34:57 managed-node2 platform-python[14992]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:34:57 managed-node2 platform-python[15115]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:34:57 managed-node2 platform-python[15238]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False 
get_mime=True get_attributes=True\nJul 12 12:34:58 managed-node2 platform-python[15337]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752338097.5592623-9962-32376786540712/source _original_basename=tmp571i0p6f follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None\nJul 12 12:34:58 managed-node2 platform-python[15462]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:34:58 managed-node2 kernel: evm: overlay not supported\nJul 12 12:34:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\\x2dcheck2773103887-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-metacopy\\x2dcheck2773103887-merged.mount has successfully entered the 'dead' state.\nJul 12 12:34:59 managed-node2 systemd[1]: Created slice machine.slice.\n-- Subject: Unit machine.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:34:59 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice.\n-- Subject: Unit machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:34:59 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:35:03 managed-node2 platform-python[15788]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:35:04 managed-node2 platform-python[15917]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:35:07 managed-node2 platform-python[16042]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True 
security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:35:11 managed-node2 platform-python[16165]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 12 12:35:11 managed-node2 platform-python[16292]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 12 12:35:12 managed-node2 platform-python[16419]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 12 12:35:14 managed-node2 platform-python[16542]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:35:17 managed-node2 platform-python[16665]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:35:19 managed-node2 platform-python[16788]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:35:22 managed-node2 platform-python[16911]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 12 12:35:24 managed-node2 platform-python[17059]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 12 12:35:25 managed-node2 platform-python[17182]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 12 12:35:30 managed-node2 platform-python[17305]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True 
get_attributes=True checksum_algorithm=sha1\nJul 12 12:35:31 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:35:32 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:35:32 managed-node2 platform-python[17567]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:35:32 managed-node2 platform-python[17690]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:35:33 managed-node2 platform-python[17813]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 12 12:35:33 managed-node2 platform-python[17912]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752338133.0421798-11440-200522690369055/source _original_basename=tmpx4spj4rr follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None\nJul 12 12:35:34 managed-node2 platform-python[18037]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:35:34 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice.\n-- Subject: Unit machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:35:34 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The 
unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:35:37 managed-node2 platform-python[18324]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:35:38 managed-node2 platform-python[18453]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:35:40 managed-node2 platform-python[18578]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:35:43 managed-node2 platform-python[18701]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 12 12:35:44 managed-node2 platform-python[18828]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 12 12:35:45 managed-node2 platform-python[18955]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 12 12:35:47 managed-node2 platform-python[19078]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:35:50 managed-node2 platform-python[19201]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:35:52 managed-node2 platform-python[19324]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True 
install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:35:55 managed-node2 platform-python[19447]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 12 12:35:57 managed-node2 platform-python[19595]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 12 12:35:58 managed-node2 platform-python[19718]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 12 12:36:02 managed-node2 platform-python[19841]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:36:03 managed-node2 platform-python[19966]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:36:04 managed-node2 platform-python[20090]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 12 12:36:04 managed-node2 platform-python[20217]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:36:05 managed-node2 platform-python[20342]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:36:05 managed-node2 platform-python[20342]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/nopull.yml\nJul 12 12:36:05 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice.\n-- Subject: Unit machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice has finished shutting down.\nJul 12 12:36:05 managed-node2 systemd[1]: machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice: Consumed 0 CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_f4f71cfe3b2807ecdf33d52b6294a4380867abcb69993412d943ef4df3ac3ea0.slice completed and consumed the indicated resources.\nJul 12 12:36:05 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit 
var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:36:05 managed-node2 platform-python[20480]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:36:06 managed-node2 platform-python[20603]: ansible-command Invoked with _raw_params=podman image prune -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:36:09 managed-node2 platform-python[20858]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:36:10 managed-node2 platform-python[20987]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:36:13 managed-node2 platform-python[21112]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:36:16 managed-node2 platform-python[21235]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 12 12:36:17 managed-node2 platform-python[21362]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 12 12:36:18 managed-node2 platform-python[21489]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 12 12:36:19 managed-node2 platform-python[21612]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:36:22 managed-node2 
platform-python[21735]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:36:25 managed-node2 platform-python[21858]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:36:28 managed-node2 platform-python[21981]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 12 12:36:30 managed-node2 platform-python[22129]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 12 12:36:31 managed-node2 platform-python[22252]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 12 12:36:35 managed-node2 platform-python[22375]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:36:36 managed-node2 platform-python[22500]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:36:37 managed-node2 platform-python[22624]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 12 12:36:37 managed-node2 platform-python[22751]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:36:38 managed-node2 platform-python[22876]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:36:38 managed-node2 platform-python[22876]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/bogus.yml\nJul 12 12:36:38 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice.\n-- Subject: Unit machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice has finished shutting down\n-- Defined-By: systemd\n-- 
Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice has finished shutting down.\nJul 12 12:36:38 managed-node2 systemd[1]: machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice: Consumed 0 CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_80146d39667a168ac04ade4a73d8e092e3d27a37f3c8a85525744b38aea9ae17.slice completed and consumed the indicated resources.\nJul 12 12:36:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:36:38 managed-node2 platform-python[23015]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:36:39 managed-node2 platform-python[23138]: ansible-command Invoked with _raw_params=podman image prune -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:36:39 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:36:42 managed-node2 platform-python[23394]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:36:44 managed-node2 platform-python[23523]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:36:46 managed-node2 platform-python[23648]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:36:50 managed-node2 platform-python[23771]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 12 12:36:50 managed-node2 platform-python[23898]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 12 12:36:51 managed-node2 platform-python[24025]: 
ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 12 12:36:52 managed-node2 platform-python[24148]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:36:55 managed-node2 platform-python[24271]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:36:58 managed-node2 platform-python[24394]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:37:01 managed-node2 platform-python[24517]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 12 12:37:03 managed-node2 platform-python[24665]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 12 12:37:04 managed-node2 platform-python[24788]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 12 12:37:08 managed-node2 platform-python[24911]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 12 12:37:09 managed-node2 platform-python[25036]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:37:09 managed-node2 platform-python[25161]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:10 managed-node2 platform-python[25285]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None 
executable=None creates=None removes=None stdin=None\nJul 12 12:37:11 managed-node2 platform-python[25409]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:12 managed-node2 platform-python[25533]: ansible-command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None\nJul 12 12:37:12 managed-node2 systemd[1]: Created slice User Slice of UID 3001.\n-- Subject: Unit user-3001.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-3001.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:12 managed-node2 systemd[1]: Starting User runtime directory /run/user/3001...\n-- Subject: Unit user-runtime-dir@3001.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-runtime-dir@3001.service has begun starting up.\nJul 12 12:37:12 managed-node2 systemd[1]: Started User runtime directory /run/user/3001.\n-- Subject: Unit user-runtime-dir@3001.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-runtime-dir@3001.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:12 managed-node2 systemd[1]: Starting User Manager for UID 3001...\n-- Subject: Unit user@3001.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user@3001.service has begun starting up.\nJul 12 12:37:12 managed-node2 systemd[25539]: pam_unix(systemd-user:session): session opened for user podman_basic_user by (uid=0)\nJul 12 12:37:12 managed-node2 systemd[25539]: Reached target Paths.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:12 managed-node2 systemd[25539]: Started Mark boot as successful after the user session has run 2 minutes.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:12 managed-node2 systemd[25539]: Starting D-Bus User Message Bus Socket.\n-- Subject: Unit UNIT has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has begun starting up.\nJul 12 12:37:12 managed-node2 systemd[25539]: Reached target Timers.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:12 managed-node2 systemd[25539]: Listening on D-Bus User Message Bus Socket.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:12 managed-node2 systemd[25539]: Reached target Sockets.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has 
finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:12 managed-node2 systemd[25539]: Reached target Basic System.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:12 managed-node2 systemd[25539]: Reached target Default.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:12 managed-node2 systemd[25539]: Startup finished in 28ms.\n-- Subject: User manager start-up is now complete\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The user manager instance for user 3001 has been started. All services queued\n-- for starting have been started. Note that other services might still be starting\n-- up or be started at any later time.\n-- \n-- Startup of the manager took 28872 microseconds.\nJul 12 12:37:12 managed-node2 systemd[1]: Started User Manager for UID 3001.\n-- Subject: Unit user@3001.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user@3001.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:13 managed-node2 platform-python[25674]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:37:13 managed-node2 platform-python[25797]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:37:13 managed-node2 sudo[25920]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-flowetcsnhyltwcqlvhwzynouopxqrjl ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338233.6175296-15753-169052845046334/AnsiballZ_podman_image.py'\nJul 12 12:37:13 managed-node2 sudo[25920]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:37:14 managed-node2 systemd[25539]: Started D-Bus User Message Bus.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:14 managed-node2 systemd[25539]: Created slice user.slice.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:14 managed-node2 
systemd[25539]: Started podman-25932.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:14 managed-node2 systemd[25539]: Started podman-pause-5a039c99.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:14 managed-node2 systemd[25539]: Started podman-25948.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:14 managed-node2 systemd[25539]: Started podman-25963.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:14 managed-node2 sudo[25920]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:37:15 managed-node2 platform-python[26093]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:37:15 managed-node2 platform-python[26216]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:37:16 managed-node2 platform-python[26339]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 12 12:37:16 managed-node2 platform-python[26438]: ansible-copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752338235.9099538-15874-160470408042927/source _original_basename=tmphfu4mgeo follow=False checksum=effe6499c246b4e7daac7803b02ca2cad861ad5c backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None\nJul 12 12:37:16 managed-node2 sudo[26563]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ggfotomaneyfnrfutjcomejzhhvgfhsm ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338236.608599-15911-85925477640473/AnsiballZ_podman_play.py'\nJul 12 12:37:16 managed-node2 sudo[26563]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:37:16 managed-node2 platform-python[26566]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug 
kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:37:16 managed-node2 systemd[25539]: Started podman-26574.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:17 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6\nJul 12 12:37:17 managed-node2 systemd[25539]: Started rootless-netns-cfbb367e.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:17 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this.\nJul 12 12:37:17 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth14aad36c: link is not ready\nJul 12 12:37:17 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered blocking state\nJul 12 12:37:17 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered disabled state\nJul 12 12:37:17 managed-node2 kernel: device veth14aad36c entered promiscuous mode\nJul 12 12:37:17 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready\nJul 12 12:37:17 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready\nJul 12 12:37:17 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth14aad36c: link becomes ready\nJul 12 12:37:17 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered blocking state\nJul 12 12:37:17 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered forwarding state\nJul 12 12:37:17 managed-node2 dnsmasq[26760]: listening on cni-podman1(#3): 10.89.0.1\nJul 12 12:37:17 managed-node2 dnsmasq[26762]: started, version 2.79 cachesize 150\nJul 12 12:37:17 managed-node2 dnsmasq[26762]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify\nJul 12 12:37:17 managed-node2 dnsmasq[26762]: using local addresses only for domain dns.podman\nJul 12 12:37:17 managed-node2 dnsmasq[26762]: reading /etc/resolv.conf\nJul 12 12:37:17 managed-node2 dnsmasq[26762]: using local addresses only for domain dns.podman\nJul 12 12:37:17 managed-node2 dnsmasq[26762]: using nameserver 10.0.2.3#53\nJul 12 12:37:17 managed-node2 dnsmasq[26762]: using nameserver 10.29.169.13#53\nJul 12 12:37:17 managed-node2 dnsmasq[26762]: using nameserver 10.29.170.12#53\nJul 12 12:37:17 managed-node2 dnsmasq[26762]: using nameserver 10.2.32.1#53\nJul 12 12:37:17 managed-node2 dnsmasq[26762]: read /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 12 12:37:17 managed-node2 conmon[26776]: conmon db962b9f1559ffd15c96 : failed to write to /proc/self/oom_score_adj: Permission denied\nJul 12 12:37:17 managed-node2 conmon[26777]: conmon db962b9f1559ffd15c96 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach}\nJul 12 12:37:17 managed-node2 conmon[26777]: conmon db962b9f1559ffd15c96 : 
terminal_ctrl_fd: 14\nJul 12 12:37:17 managed-node2 conmon[26777]: conmon db962b9f1559ffd15c96 : winsz read side: 17, winsz write side: 18\nJul 12 12:37:17 managed-node2 conmon[26777]: conmon db962b9f1559ffd15c96 : container PID: 26787\nJul 12 12:37:17 managed-node2 conmon[26797]: conmon 8b812a2ec55f9de0cde0 : failed to write to /proc/self/oom_score_adj: Permission denied\nJul 12 12:37:17 managed-node2 conmon[26798]: conmon 8b812a2ec55f9de0cde0 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach}\nJul 12 12:37:17 managed-node2 conmon[26798]: conmon 8b812a2ec55f9de0cde0 : terminal_ctrl_fd: 13\nJul 12 12:37:17 managed-node2 conmon[26798]: conmon 8b812a2ec55f9de0cde0 : winsz read side: 16, winsz write side: 17\nJul 12 12:37:17 managed-node2 conmon[26798]: conmon 8b812a2ec55f9de0cde0 : container PID: 26808\nJul 12 12:37:17 managed-node2 platform-python[26566]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 12 12:37:17 managed-node2 platform-python[26566]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod:\n 49a038584fa170b91928655aabc3334b950d706880329a96a99914f26bc7c59d\n Container:\n 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\n \nJul 12 12:37:17 managed-node2 platform-python[26566]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-12T12:37:16-04:00\" level=info msg=\"/bin/podman filtering at log level debug\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-12T12:37:16-04:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Initializing event backend file\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug 
msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\n time=\"2025-07-12T12:37:16-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Successfully loaded 1 networks\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"found free device name cni-podman1\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"found free ipv4 network subnet 10.89.0.0/24\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-12 12:37:16.97600692 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"reference \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\" does not resolve to an image ID\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"reference \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\" does not resolve to an image ID\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"FROM \\\"scratch\\\"\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Cached value indicated that idmapped mounts for overlay are not supported\"\n 
time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Check for idmapped mounts support \"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:16-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"overlay: test mount indicated that volatile is being used\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263/work,userxattr,volatile,context=\\\"system_u:object_r:container_file_t:s0:c99,c874\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Container ID: 74b081262df1d810c422dbcbe1db2f5a2adc384492d57cda98cbd9e90ab37ee1\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\\\"\\\", Src:[]string{\\\"/usr/libexec/podman/catatonit\\\"}, Dest:\\\"/catatonit\\\", Download:false, Chown:\\\"\\\", Chmod:\\\"\\\", Checksum:\\\"\\\", Files:[]imagebuilder.File(nil)}\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"added content file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]}\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"COMMIT localhost/podman-pause:4.9.4-dev-1708535009\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"COMMIT \\\"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"committing image with reference \\\"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\" is allowed by policy\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"layer list: [\\\"221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263\\\"]\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"using \\\"/var/tmp/buildah1838958819\\\" to hold temporary data\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Tar with options on 
/home/podman_basic_user/.local/share/containers/storage/overlay/221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263/diff\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"layer \\\"221eaffe115c1eab98bc107fb1d33a30792204936b8794cac38b4263ec8bc263\\\" size is 767488 bytes, uncompressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690, possibly-compressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"OCIv1 config = {\\\"created\\\":\\\"2025-07-12T16:37:17.118933835Z\\\",\\\"architecture\\\":\\\"amd64\\\",\\\"os\\\":\\\"linux\\\",\\\"config\\\":{\\\"Env\\\":[\\\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\\\"],\\\"Entrypoint\\\":[\\\"/catatonit\\\",\\\"-P\\\"],\\\"Labels\\\":{\\\"io.buildah.version\\\":\\\"1.33.5\\\"}},\\\"rootfs\\\":{\\\"type\\\":\\\"layers\\\",\\\"diff_ids\\\":[\\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\"]},\\\"history\\\":[{\\\"created\\\":\\\"2025-07-12T16:37:17.118347731Z\\\",\\\"created_by\\\":\\\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \\\",\\\"empty_layer\\\":true},{\\\"created\\\":\\\"2025-07-12T16:37:17.122165868Z\\\",\\\"created_by\\\":\\\"/bin/sh -c #(nop) ENTRYPOINT [\\\\\\\"/catatonit\\\\\\\", \\\\\\\"-P\\\\\\\"]\\\"}]}\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"OCIv1 manifest = {\\\"schemaVersion\\\":2,\\\"mediaType\\\":\\\"application/vnd.oci.image.manifest.v1+json\\\",\\\"config\\\":{\\\"mediaType\\\":\\\"application/vnd.oci.image.config.v1+json\\\",\\\"digest\\\":\\\"sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\",\\\"size\\\":668},\\\"layers\\\":[{\\\"mediaType\\\":\\\"application/vnd.oci.image.layer.v1.tar\\\",\\\"digest\\\":\\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\",\\\"size\\\":767488}],\\\"annotations\\\":{\\\"org.opencontainers.image.base.digest\\\":\\\"\\\",\\\"org.opencontainers.image.base.name\\\":\\\"\\\"}}\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Docker v2s2 config = 
{\\\"created\\\":\\\"2025-07-12T16:37:17.118933835Z\\\",\\\"container\\\":\\\"74b081262df1d810c422dbcbe1db2f5a2adc384492d57cda98cbd9e90ab37ee1\\\",\\\"container_config\\\":{\\\"Hostname\\\":\\\"\\\",\\\"Domainname\\\":\\\"\\\",\\\"User\\\":\\\"\\\",\\\"AttachStdin\\\":false,\\\"AttachStdout\\\":false,\\\"AttachStderr\\\":false,\\\"Tty\\\":false,\\\"OpenStdin\\\":false,\\\"StdinOnce\\\":false,\\\"Env\\\":[\\\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\\\"],\\\"Cmd\\\":[],\\\"Image\\\":\\\"\\\",\\\"Volumes\\\":{},\\\"WorkingDir\\\":\\\"\\\",\\\"Entrypoint\\\":[\\\"/catatonit\\\",\\\"-P\\\"],\\\"OnBuild\\\":[],\\\"Labels\\\":{\\\"io.buildah.version\\\":\\\"1.33.5\\\"}},\\\"config\\\":{\\\"Hostname\\\":\\\"\\\",\\\"Domainname\\\":\\\"\\\",\\\"User\\\":\\\"\\\",\\\"AttachStdin\\\":false,\\\"AttachStdout\\\":false,\\\"AttachStderr\\\":false,\\\"Tty\\\":false,\\\"OpenStdin\\\":false,\\\"StdinOnce\\\":false,\\\"Env\\\":[\\\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\\\"],\\\"Cmd\\\":[],\\\"Image\\\":\\\"\\\",\\\"Volumes\\\":{},\\\"WorkingDir\\\":\\\"\\\",\\\"Entrypoint\\\":[\\\"/catatonit\\\",\\\"-P\\\"],\\\"OnBuild\\\":[],\\\"Labels\\\":{\\\"io.buildah.version\\\":\\\"1.33.5\\\"}},\\\"architecture\\\":\\\"amd64\\\",\\\"os\\\":\\\"linux\\\",\\\"rootfs\\\":{\\\"type\\\":\\\"layers\\\",\\\"diff_ids\\\":[\\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\"]},\\\"history\\\":[{\\\"created\\\":\\\"2025-07-12T16:37:17.118347731Z\\\",\\\"created_by\\\":\\\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \\\",\\\"empty_layer\\\":true},{\\\"created\\\":\\\"2025-07-12T16:37:17.122165868Z\\\",\\\"created_by\\\":\\\"/bin/sh -c #(nop) ENTRYPOINT [\\\\\\\"/catatonit\\\\\\\", \\\\\\\"-P\\\\\\\"]\\\"}]}\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Docker v2s2 manifest = {\\\"schemaVersion\\\":2,\\\"mediaType\\\":\\\"application/vnd.docker.distribution.manifest.v2+json\\\",\\\"config\\\":{\\\"mediaType\\\":\\\"application/vnd.docker.container.image.v1+json\\\",\\\"size\\\":1342,\\\"digest\\\":\\\"sha256:706c7e5b14dda8248bcff3ec5c250761bd8f764535609aa9365ce9e4b43361c2\\\"},\\\"layers\\\":[{\\\"mediaType\\\":\\\"application/vnd.docker.image.rootfs.diff.tar\\\",\\\"size\\\":767488,\\\"digest\\\":\\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\"}]}\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"IsRunningImageAllowed for image containers-storage:\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\" Using transport \\\"containers-storage\\\" policy section \"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\" Requirement 0: allowed\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Overall: allowed\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"start reading config\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"finished reading config\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug 
msg=\"... will first try using the original manifest unmodified\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Checking if we can reuse blob sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690: general substitution = true, compression for MIME type \\\"application/vnd.oci.image.layer.v1.tar\\\" = true\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"reading layer \\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"No compression detected\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Using original blob without modification\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690/diff\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"finished reading layer \\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"No compression detected\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Compression change for blob sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566 (\\\"application/vnd.oci.image.config.v1+json\\\") not supported\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Using original blob without modification\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"setting image creation date to 2025-07-12 16:37:17.118933835 +0000 UTC\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"created new image ID \\\"948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\" with metadata \\\"{}\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"added name \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" to image \\\"948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"printing final image id \\\"948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Got pod cgroup as /libpod_parent/49a038584fa170b91928655aabc3334b950d706880329a96a99914f26bc7c59d\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as 
\\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566)\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Inspecting image 948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Inspecting image 948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Inspecting image 948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"setting container name 49a038584fa1-infra\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Allocated lock 1 for container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Created container \\\"db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Container \\\"db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\\\" has work directory \\\"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Container \\\"db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\\\" has run directory \\\"/run/user/3001/containers/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as 
\\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Pulling image quay.io/libpod/testimage:20210610 (policy: missing)\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob 
\\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"adding container to pod httpd1\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"setting container name httpd1-httpd1\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=info msg=\"Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Adding mount /proc\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Adding mount /dev\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Adding mount /dev/pts\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Adding mount /dev/mqueue\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Adding mount /sys\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Adding mount /sys/fs/cgroup\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Allocated lock 2 for container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"parsed reference into 
\\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Created container \\\"8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Container \\\"8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\\\" has work directory \\\"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Container \\\"8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\\\" has run directory \\\"/run/user/3001/containers/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Strongconnecting node db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Pushed db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 onto stack\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Finishing node db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70. Popped db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 off stack\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Strongconnecting node 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Pushed 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 onto stack\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Finishing node 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057. 
Popped 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 off stack\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/OM4I4NAT7NV6G6FUUDQFTEASSZ,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/a85a1e9326b5058f772b4fc440996b4ba136c02bad6966e471895a60f615298c/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/a85a1e9326b5058f772b4fc440996b4ba136c02bad6966e471895a60f615298c/work,userxattr,context=\\\"system_u:object_r:container_file_t:s0:c277,c351\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Mounted container \\\"db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\\\" at \\\"/home/podman_basic_user/.local/share/containers/storage/overlay/a85a1e9326b5058f772b4fc440996b4ba136c02bad6966e471895a60f615298c/merged\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Created root filesystem for container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 at /home/podman_basic_user/.local/share/containers/storage/overlay/a85a1e9326b5058f772b4fc440996b4ba136c02bad6966e471895a60f615298c/merged\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Made network namespace at /run/user/3001/netns/netns-d0ac84ca-ca87-3466-1642-2cff38531036 for container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"creating rootless network namespace with name \\\"rootless-netns-d22c9f230d0691b8f418\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"slirp4netns command: /bin/slirp4netns --disable-host-loopback --mtu=65520 --enable-sandbox --enable-seccomp --enable-ipv6 -c -r 3 --netns-type=path /run/user/3001/netns/rootless-netns-d22c9f230d0691b8f418 tap0\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"The path of /etc/resolv.conf in the mount ns is \\\"/etc/resolv.conf\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"cni result for container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:0a:fa:9a:36:b8:3a Sandbox:} {Name:veth14aad36c Mac:82:8b:99:b5:f7:b0 Sandbox:} {Name:eth0 Mac:b6:e7:40:6d:da:9c Sandbox:/run/user/3001/netns/netns-d0ac84ca-ca87-3466-1642-2cff38531036}] [{Version:4 Interface:0xc0008e9188 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-12T12:37:17-04:00\\\" level=info msg=\\\"Starting parent driver\\\"\\ntime=\\\"2025-07-12T12:37:17-04:00\\\" level=info msg=\\\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport4142254753/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport4142254753/.bp.sock]\\\"\\ntime=\\\"2025-07-12T12:37:17-04:00\\\" level=info msg=\\\"Starting child driver in child netns (\\\\\\\"/proc/self/exe\\\\\\\" [rootlessport-child])\\\"\\n\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-12T12:37:17-04:00\\\" level=info msg=\\\"Waiting for initComplete\\\"\\n\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-12T12:37:17-04:00\\\" level=info msg=\\\"initComplete is closed; parent and child established the communication channel\\\"\\n\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"rootlessport: 
time=\\\"2025-07-12T12:37:17-04:00\\\" level=info msg=\\\"Exposing ports [{ 80 15001 1 tcp}]\\\"\\n\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"rootlessport is ready\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-12T12:37:17-04:00\\\" level=info msg=Ready\\n\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"/etc/system-fips does not exist on host, not mounting FIPS mode subscription\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Workdir \\\"/\\\" resolved to host path \\\"/home/podman_basic_user/.local/share/containers/storage/overlay/a85a1e9326b5058f772b4fc440996b4ba136c02bad6966e471895a60f615298c/merged\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Created OCI spec for container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata/config.json\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Got pod cgroup as \"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 -u db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata -p /run/user/3001/containers/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata/pidfile -n 49a038584fa1-infra --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70]\"\n time=\"2025-07-12T12:37:17-04:00\" level=info msg=\"Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for cpu: mkdir /sys/fs/cgroup/cpu/libpod_parent: permission denied\"\n [conmon:d]: 
failed to write to /proc/self/oom_score_adj: Permission denied\n \n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Received: 26787\"\n time=\"2025-07-12T12:37:17-04:00\" level=info msg=\"Got Conmon PID as 26777\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Created container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 in OCI runtime\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Adding nameserver(s) from network status of '[\\\"10.89.0.1\\\"]'\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Adding search domain(s) from network status of '[\\\"dns.podman\\\"]'\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Starting container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70 with command [/catatonit -P]\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Started container db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/SGU47AVGSROXANDACX3GODEDPF,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/1f9869d8c46f1dd99255378c38742ad9514c5c2f608c50250883573ed9f20064/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/1f9869d8c46f1dd99255378c38742ad9514c5c2f608c50250883573ed9f20064/work,userxattr,context=\\\"system_u:object_r:container_file_t:s0:c277,c351\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Mounted container \\\"8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\\\" at \\\"/home/podman_basic_user/.local/share/containers/storage/overlay/1f9869d8c46f1dd99255378c38742ad9514c5c2f608c50250883573ed9f20064/merged\\\"\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Created root filesystem for container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 at /home/podman_basic_user/.local/share/containers/storage/overlay/1f9869d8c46f1dd99255378c38742ad9514c5c2f608c50250883573ed9f20064/merged\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"/etc/system-fips does not exist on host, not mounting FIPS mode subscription\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Workdir \\\"/var/www\\\" resolved to a volume or mount\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Created OCI spec for container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata/config.json\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Got pod cgroup as \"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 -u 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata -p /run/user/3001/containers/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l 
k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057]\"\n time=\"2025-07-12T12:37:17-04:00\" level=info msg=\"Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for cpu: mkdir /sys/fs/cgroup/cpu/conmon: permission denied\"\n [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied\n \n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Received: 26808\"\n time=\"2025-07-12T12:37:17-04:00\" level=info msg=\"Got Conmon PID as 26798\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Created container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 in OCI runtime\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Starting container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057 with command [/bin/busybox-extras httpd -f -p 80]\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Started container 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)\"\n time=\"2025-07-12T12:37:17-04:00\" level=debug msg=\"Shutting down engines\"\nJul 12 12:37:17 managed-node2 platform-python[26566]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 12 12:37:17 managed-node2 sudo[26563]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:37:18 managed-node2 sudo[26939]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yxngcsmbouppolsnchwedyvvmqwcqmcp ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338237.9288857-15948-265909207589811/AnsiballZ_systemd.py'\nJul 12 12:37:18 managed-node2 sudo[26939]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:37:18 managed-node2 platform-python[26942]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None\nJul 12 12:37:18 managed-node2 systemd[25539]: 
Reloading.\nJul 12 12:37:18 managed-node2 sudo[26939]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:37:18 managed-node2 sudo[27076]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jxciagckwyaiwverlxxxicxpjcaamzpb ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338238.5066783-15974-208582760237043/AnsiballZ_systemd.py'\nJul 12 12:37:18 managed-node2 sudo[27076]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:37:18 managed-node2 dnsmasq[26762]: listening on cni-podman1(#3): fe80::8fa:9aff:fe36:b83a%cni-podman1\nJul 12 12:37:18 managed-node2 platform-python[27079]: ansible-systemd Invoked with name= scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None\nJul 12 12:37:18 managed-node2 systemd[25539]: Reloading.\nJul 12 12:37:18 managed-node2 sudo[27076]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:37:19 managed-node2 sudo[27215]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aeingsotugnwsviddfzcxglibrontkpg ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338239.1567352-16007-20750914035253/AnsiballZ_systemd.py'\nJul 12 12:37:19 managed-node2 sudo[27215]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:37:19 managed-node2 platform-python[27218]: ansible-systemd Invoked with name= scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None\nJul 12 12:37:19 managed-node2 systemd[25539]: Created slice podman\\x2dkube.slice.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:19 managed-node2 systemd[25539]: Starting A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit UNIT has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has begun starting up.\nJul 12 12:37:19 managed-node2 conmon[26798]: conmon 8b812a2ec55f9de0cde0 : container 26808 exited with status 137\nJul 12 12:37:19 managed-node2 conmon[26777]: conmon db962b9f1559ffd15c96 : container 26787 exited with status 137\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057)\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: 
time=\"2025-07-12T12:37:19-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70)\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using transient store: false\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Initializing event backend file\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: 
time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using transient store: false\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" 
level=debug msg=\"Cached value indicated that metacopy is not being used\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Initializing event backend file\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup 8b812a2ec55f9de0cde0ea5e46f1692d3e1abd3473484d7fdd3be87fd7195057)\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27244]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Shutting down engines\"\nJul 12 12:37:19 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered disabled state\nJul 12 12:37:19 managed-node2 kernel: device veth14aad36c left promiscuous mode\nJul 12 12:37:19 managed-node2 kernel: cni-podman1: port 1(veth14aad36c) entered disabled state\nJul 12 12:37:19 
managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup db962b9f1559ffd15c962e4cb0f6cb80639c2b68db20363e66e1a1d3387c6a70)\"\nJul 12 12:37:19 managed-node2 /usr/bin/podman[27252]: time=\"2025-07-12T12:37:19-04:00\" level=debug msg=\"Shutting down engines\"\nJul 12 12:37:19 managed-node2 podman[27224]: Pods stopped:\nJul 12 12:37:19 managed-node2 podman[27224]: 49a038584fa170b91928655aabc3334b950d706880329a96a99914f26bc7c59d\nJul 12 12:37:19 managed-node2 podman[27224]: Pods removed:\nJul 12 12:37:19 managed-node2 podman[27224]: 49a038584fa170b91928655aabc3334b950d706880329a96a99914f26bc7c59d\nJul 12 12:37:19 managed-node2 podman[27224]: Secrets removed:\nJul 12 12:37:19 managed-node2 podman[27224]: Volumes removed:\nJul 12 12:37:20 managed-node2 systemd[25539]: Started rootless-netns-910042d3.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:20 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth73ffc199: link is not ready\nJul 12 12:37:20 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered blocking state\nJul 12 12:37:20 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered disabled state\nJul 12 12:37:20 managed-node2 kernel: device veth73ffc199 entered promiscuous mode\nJul 12 12:37:20 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered blocking state\nJul 12 12:37:20 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered forwarding state\nJul 12 12:37:20 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth73ffc199: link becomes ready\nJul 12 12:37:20 managed-node2 dnsmasq[27470]: listening on cni-podman1(#3): 10.89.0.1\nJul 12 12:37:20 managed-node2 dnsmasq[27472]: started, version 2.79 cachesize 150\nJul 12 12:37:20 managed-node2 dnsmasq[27472]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify\nJul 12 12:37:20 managed-node2 dnsmasq[27472]: using local addresses only for domain dns.podman\nJul 12 12:37:20 managed-node2 dnsmasq[27472]: reading /etc/resolv.conf\nJul 12 12:37:20 managed-node2 dnsmasq[27472]: using local addresses only for domain dns.podman\nJul 12 12:37:20 managed-node2 dnsmasq[27472]: using nameserver 10.0.2.3#53\nJul 12 12:37:20 managed-node2 dnsmasq[27472]: using nameserver 10.29.169.13#53\nJul 12 12:37:20 managed-node2 dnsmasq[27472]: using nameserver 10.29.170.12#53\nJul 12 12:37:20 managed-node2 dnsmasq[27472]: using nameserver 10.2.32.1#53\nJul 12 12:37:20 managed-node2 dnsmasq[27472]: read /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 12 12:37:20 managed-node2 podman[27224]: Pod:\nJul 12 12:37:20 managed-node2 podman[27224]: e6000b000d16f866b55a1a19dd3fb3262c82c732a816136db57b7e8368f0a32a\nJul 12 12:37:20 managed-node2 podman[27224]: Container:\nJul 12 12:37:20 managed-node2 podman[27224]: 
3e84611729acf9a795f4d6223da39f911f01d8e5bb78d05b15144b66878ad807\nJul 12 12:37:20 managed-node2 systemd[25539]: Started A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:20 managed-node2 sudo[27215]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:37:21 managed-node2 platform-python[27649]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 12 12:37:21 managed-node2 platform-python[27773]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:37:22 managed-node2 dnsmasq[27472]: listening on cni-podman1(#3): fe80::c95:b4ff:fe67:d35c%cni-podman1\nJul 12 12:37:23 managed-node2 platform-python[27898]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:24 managed-node2 platform-python[28022]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:37:24 managed-node2 platform-python[28145]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:37:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:37:25 managed-node2 platform-python[28435]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:37:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:37:26 managed-node2 platform-python[28558]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None 
serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:37:26 managed-node2 platform-python[28681]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 12 12:37:27 managed-node2 platform-python[28780]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752338246.5734363-16367-230792965661198/source _original_basename=tmpcx3lufsl follow=False checksum=d1d2b75756121a76b51c55942528a638a8e19d00 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None\nJul 12 12:37:27 managed-node2 platform-python[28905]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:37:27 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice.\n-- Subject: Unit machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8210] manager: (cni-podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3)\nJul 12 12:37:27 managed-node2 systemd-udevd[28952]: Using default interface naming scheme 'rhel-8.0'.\nJul 12 12:37:27 managed-node2 systemd-udevd[28953]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.\nJul 12 12:37:27 managed-node2 systemd-udevd[28953]: Could not generate persistent MAC address for vetha808c72b: No such file or directory\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8294] manager: (vetha808c72b): new Veth device (/org/freedesktop/NetworkManager/Devices/4)\nJul 12 12:37:27 managed-node2 systemd-udevd[28952]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.\nJul 12 12:37:27 managed-node2 systemd-udevd[28952]: Could not generate persistent MAC address for cni-podman1: No such file or directory\nJul 12 12:37:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vetha808c72b: link is not ready\nJul 12 12:37:27 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered blocking state\nJul 12 12:37:27 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered disabled state\nJul 12 12:37:27 managed-node2 kernel: device vetha808c72b entered promiscuous mode\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8417] device (cni-podman1): state change: unmanaged -> unavailable 
(reason 'connection-assumed', sys-iface-state: 'external')\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8423] device (cni-podman1): state change: unavailable -> disconnected (reason 'connection-assumed', sys-iface-state: 'external')\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8433] device (cni-podman1): Activation: starting connection 'cni-podman1' (9399044c-ebcb-4319-aff1-7a172e94e2ea)\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8434] device (cni-podman1): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'external')\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8436] device (cni-podman1): state change: prepare -> config (reason 'none', sys-iface-state: 'external')\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8438] device (cni-podman1): state change: config -> ip-config (reason 'none', sys-iface-state: 'external')\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8440] device (cni-podman1): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'external')\nJul 12 12:37:27 managed-node2 dbus-daemon[601]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.5' (uid=0 pid=666 comm=\"/usr/sbin/NetworkManager --no-daemon \" label=\"system_u:system_r:NetworkManager_t:s0\")\nJul 12 12:37:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready\nJul 12 12:37:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready\nJul 12 12:37:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vetha808c72b: link becomes ready\nJul 12 12:37:27 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered blocking state\nJul 12 12:37:27 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered forwarding state\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8634] device (vetha808c72b): carrier: link connected\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.8637] device (cni-podman1): carrier: link connected\nJul 12 12:37:27 managed-node2 systemd[1]: Starting Network Manager Script Dispatcher Service...\n-- Subject: Unit NetworkManager-dispatcher.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit NetworkManager-dispatcher.service has begun starting up.\nJul 12 12:37:27 managed-node2 dbus-daemon[601]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher'\nJul 12 12:37:27 managed-node2 systemd[1]: Started Network Manager Script Dispatcher Service.\n-- Subject: Unit NetworkManager-dispatcher.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit NetworkManager-dispatcher.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.9275] device (cni-podman1): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'external')\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.9277] device (cni-podman1): state change: secondaries -> activated (reason 'none', sys-iface-state: 'external')\nJul 12 12:37:27 managed-node2 NetworkManager[666]: [1752338247.9282] device (cni-podman1): Activation: successful, device activated.\nJul 12 12:37:28 managed-node2 dnsmasq[29076]: listening on cni-podman1(#3): 10.89.0.1\nJul 12 12:37:28 managed-node2 dnsmasq[29080]: started, 
version 2.79 cachesize 150\nJul 12 12:37:28 managed-node2 dnsmasq[29080]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify\nJul 12 12:37:28 managed-node2 dnsmasq[29080]: using local addresses only for domain dns.podman\nJul 12 12:37:28 managed-node2 dnsmasq[29080]: reading /etc/resolv.conf\nJul 12 12:37:28 managed-node2 dnsmasq[29080]: using local addresses only for domain dns.podman\nJul 12 12:37:28 managed-node2 dnsmasq[29080]: using nameserver 10.29.169.13#53\nJul 12 12:37:28 managed-node2 dnsmasq[29080]: using nameserver 10.29.170.12#53\nJul 12 12:37:28 managed-node2 dnsmasq[29080]: using nameserver 10.2.32.1#53\nJul 12 12:37:28 managed-node2 dnsmasq[29080]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 12 12:37:28 managed-node2 systemd[1]: Started libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope.\n-- Subject: Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:28 managed-node2 conmon[29082]: conmon 2bba312ac73f9a5e5631 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach}\nJul 12 12:37:28 managed-node2 conmon[29082]: conmon 2bba312ac73f9a5e5631 : terminal_ctrl_fd: 13\nJul 12 12:37:28 managed-node2 conmon[29082]: conmon 2bba312ac73f9a5e5631 : winsz read side: 17, winsz write side: 18\nJul 12 12:37:28 managed-node2 systemd[1]: Started libcontainer container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.\n-- Subject: Unit libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:28 managed-node2 conmon[29082]: conmon 2bba312ac73f9a5e5631 : container PID: 29092\nJul 12 12:37:28 managed-node2 systemd[1]: Started libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope.\n-- Subject: Unit libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:28 managed-node2 conmon[29103]: conmon dde5d521943a5269ac5d : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach}\nJul 12 12:37:28 managed-node2 conmon[29103]: conmon dde5d521943a5269ac5d : terminal_ctrl_fd: 12\nJul 12 12:37:28 managed-node2 conmon[29103]: conmon dde5d521943a5269ac5d : winsz read side: 16, winsz write side: 17\nJul 12 12:37:28 managed-node2 systemd[1]: Started libcontainer container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.\n-- Subject: Unit libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has finished starting 
up.\n-- \n-- The start-up result is done.\nJul 12 12:37:28 managed-node2 conmon[29103]: conmon dde5d521943a5269ac5d : container PID: 29114\nJul 12 12:37:28 managed-node2 platform-python[28905]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 12 12:37:28 managed-node2 platform-python[28905]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod:\n a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd\n Container:\n dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\n \nJul 12 12:37:28 managed-node2 platform-python[28905]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-12T12:37:27-04:00\" level=info msg=\"/usr/bin/podman filtering at log level debug\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\n time=\"2025-07-12T12:37:27-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Initializing event backend file\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI 
runtime runsc: invalid argument\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-12 12:34:58.774465298 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2)\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice for parent machine.slice and name libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux 
[] }\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2)\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Inspecting image 4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Inspecting image 4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Inspecting image 4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"setting container name a247d85c3822-infra\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Allocated lock 1 for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Cached value indicated that idmapped mounts for overlay are not supported\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Check for idmapped mounts support \"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Created container \\\"2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Container \\\"2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\\\" has work directory \\\"/var/lib/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Container \\\"2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\\\" has run directory \\\"/run/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata\\\"\"\n 
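[Annotation] The httpd2.yml manifest being played in this stderr block is not reproduced in the log. From the details that are recorded (pod httpd2, container httpd2-httpd2, image quay.io/libpod/testimage:20210610, and the /bin/busybox-extras httpd -f -p 80 command logged for the parallel httpd1 pod), it is plausibly a minimal kube pod spec along these lines — a sketch only; the working directory and host port for httpd2 are assumptions, not values shown in this excerpt:

    apiVersion: v1
    kind: Pod
    metadata:
      name: httpd2
    spec:
      containers:
        - name: httpd2
          image: quay.io/libpod/testimage:20210610
          command: ["/bin/busybox-extras", "httpd", "-f", "-p", "80"]
          workingDir: /var/www        # assumed: matches the Workdir logged for the httpd1 container
          ports:
            - containerPort: 80
              hostPort: 15002         # hypothetical; the actual mapping for httpd2 is not in this excerpt
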
time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Pulling image quay.io/libpod/testimage:20210610 (policy: missing)\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n 
time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"adding container to pod httpd2\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"setting container name httpd2-httpd2\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=info msg=\"Sysctl net.ipv4.ping_group_range=0 0 ignored in 
containers.conf, since Network Namespace set to host\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Adding mount /proc\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Adding mount /dev\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Adding mount /dev/pts\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Adding mount /dev/mqueue\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Adding mount /sys\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Adding mount /sys/fs/cgroup\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Allocated lock 2 for container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Created container \\\"dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Container \\\"dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\\\" has work directory \\\"/var/lib/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Container \\\"dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\\\" has run directory \\\"/run/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Strongconnecting node dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Pushed dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 onto stack\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Recursing to successor node 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Strongconnecting node 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Pushed 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 onto stack\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Finishing node 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07. Popped 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 off stack\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Finishing node dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0. 
Popped dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 off stack\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/BPQ67IPF3U2MS7MKOAJ6EE5AVL,upperdir=/var/lib/containers/storage/overlay/110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0/diff,workdir=/var/lib/containers/storage/overlay/110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0/work,nodev,metacopy=on,context=\\\"system_u:object_r:container_file_t:s0:c20,c130\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Mounted container \\\"2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\\\" at \\\"/var/lib/containers/storage/overlay/110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0/merged\\\"\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Created root filesystem for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 at /var/lib/containers/storage/overlay/110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0/merged\"\n time=\"2025-07-12T12:37:27-04:00\" level=debug msg=\"Made network namespace at /run/netns/netns-93660061-5819-4d54-dfec-784d954efe33 for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"cni result for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:7e:63:02:ee:ed:5c Sandbox:} {Name:vetha808c72b Mac:8a:e4:ca:d3:1c:60 Sandbox:} {Name:eth0 Mac:f2:ab:50:c0:43:48 Sandbox:/run/netns/netns-93660061-5819-4d54-dfec-784d954efe33}] [{Version:4 Interface:0xc0006632b8 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"/etc/system-fips does not exist on host, not mounting FIPS mode subscription\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Setting Cgroups for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 to machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice:libpod:2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Workdir \\\"/\\\" resolved to host path \\\"/var/lib/containers/storage/overlay/110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0/merged\\\"\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Created OCI spec for container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 at /var/lib/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata/config.json\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice for parent machine.slice and name libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Got pod cgroup as 
machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 -u 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata -p /run/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata/pidfile -n a247d85c3822-infra --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07]\"\n time=\"2025-07-12T12:37:28-04:00\" level=info msg=\"Running conmon under slice machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice and unitName libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Received: 29092\"\n time=\"2025-07-12T12:37:28-04:00\" level=info msg=\"Got Conmon PID as 29082\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Created container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 in OCI runtime\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Adding nameserver(s) from network status of '[\\\"10.89.0.1\\\"]'\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Adding search domain(s) from network status of '[\\\"dns.podman\\\"]'\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Starting container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07 with command [/catatonit -P]\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Started container 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"overlay: 
mount_data=lowerdir=/var/lib/containers/storage/overlay/l/UMCCOJYMJQIWGK7MOUSAJGNIT3,upperdir=/var/lib/containers/storage/overlay/4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72/diff,workdir=/var/lib/containers/storage/overlay/4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72/work,nodev,metacopy=on,context=\\\"system_u:object_r:container_file_t:s0:c20,c130\\\"\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Mounted container \\\"dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\\\" at \\\"/var/lib/containers/storage/overlay/4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72/merged\\\"\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Created root filesystem for container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 at /var/lib/containers/storage/overlay/4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72/merged\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"/etc/system-fips does not exist on host, not mounting FIPS mode subscription\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Setting Cgroups for container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 to machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice:libpod:dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Workdir \\\"/var/www\\\" resolved to a volume or mount\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Created OCI spec for container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 at /var/lib/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata/config.json\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice for parent machine.slice and name libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 -u dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata -p /run/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0/userdata/conmon.pid 
--exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0]\"\n time=\"2025-07-12T12:37:28-04:00\" level=info msg=\"Running conmon under slice machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice and unitName libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Received: 29114\"\n time=\"2025-07-12T12:37:28-04:00\" level=info msg=\"Got Conmon PID as 29103\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Created container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 in OCI runtime\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Starting container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0 with command [/bin/busybox-extras httpd -f -p 80]\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Started container dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)\"\n time=\"2025-07-12T12:37:28-04:00\" level=debug msg=\"Shutting down engines\"\nJul 12 12:37:28 managed-node2 platform-python[28905]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 12 12:37:28 managed-node2 platform-python[29245]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None\nJul 12 12:37:28 managed-node2 systemd[1]: Reloading.\nJul 12 12:37:29 managed-node2 platform-python[29406]: ansible-systemd Invoked with name= scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None\nJul 12 12:37:29 managed-node2 systemd[1]: Reloading.\nJul 12 12:37:29 managed-node2 dnsmasq[29080]: listening on cni-podman1(#3): fe80::7c63:2ff:feee:ed5c%cni-podman1\nJul 12 12:37:30 managed-node2 platform-python[29569]: ansible-systemd Invoked with name= scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None\nJul 12 12:37:30 managed-node2 systemd[1]: Created slice system-podman\\x2dkube.slice.\n-- Subject: Unit system-podman\\x2dkube.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit system-podman\\x2dkube.slice has finished 
starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:30 managed-node2 systemd[1]: Starting A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun starting up.\nJul 12 12:37:30 managed-node2 conmon[29082]: conmon 2bba312ac73f9a5e5631 : container 29092 exited with status 137\nJul 12 12:37:30 managed-node2 systemd[1]: libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has successfully entered the 'dead' state.\nJul 12 12:37:30 managed-node2 systemd[1]: libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope: Consumed 31ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope completed and consumed the indicated resources.\nJul 12 12:37:30 managed-node2 conmon[29103]: conmon dde5d521943a5269ac5d : container 29114 exited with status 137\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07)\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 12 12:37:30 managed-node2 systemd[1]: libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has successfully entered the 'dead' state.\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 12 12:37:30 managed-node2 systemd[1]: libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope: Consumed 32ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope completed and consumed the indicated resources.\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" 
level=debug msg=\"Using graph root /var/lib/containers/storage\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using transient store: false\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Initializing event backend file\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI 
runtime kata: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0)\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using transient store: false\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: 
time=\"2025-07-12T12:37:30-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Initializing event backend file\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 12 12:37:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay-4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-4d39b8c35596ee353ef440ad2f35ea10e1beaa01c7e9bc9a2362e309c3d6bc72-merged.mount has successfully entered the 'dead' state.\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0)\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29601]: 
time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Shutting down engines\"\nJul 12 12:37:30 managed-node2 systemd[1]: libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-conmon-dde5d521943a5269ac5daa62405cbea3739266b8eb152fcd05c8f832d08f85f0.scope has successfully entered the 'dead' state.\nJul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered disabled state\nJul 12 12:37:30 managed-node2 kernel: device vetha808c72b left promiscuous mode\nJul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vetha808c72b) entered disabled state\nJul 12 12:37:30 managed-node2 systemd[1]: run-netns-netns\\x2d93660061\\x2d5819\\x2d4d54\\x2ddfec\\x2d784d954efe33.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-netns-netns\\x2d93660061\\x2d5819\\x2d4d54\\x2ddfec\\x2d784d954efe33.mount has successfully entered the 'dead' state.\nJul 12 12:37:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07-userdata-shm.mount has successfully entered the 'dead' state.\nJul 12 12:37:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay-110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-110cfc0ad81e71b5ffbe276864202fb14361279ce6bdc635dcf4d6c0a0ade6b0-merged.mount has successfully entered the 'dead' state.\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup 2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07)\"\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=debug msg=\"Shutting down engines\"\nJul 12 12:37:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:37:30 managed-node2 systemd[1]: Stopping libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope.\n-- Subject: Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has begun shutting 
down.\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=info msg=\"Received shutdown signal \\\"terminated\\\", terminating!\" PID=29592\nJul 12 12:37:30 managed-node2 /usr/bin/podman[29592]: time=\"2025-07-12T12:37:30-04:00\" level=info msg=\"Invoking shutdown handler \\\"libpod\\\"\" PID=29592\nJul 12 12:37:30 managed-node2 systemd[1]: libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has successfully entered the 'dead' state.\nJul 12 12:37:30 managed-node2 systemd[1]: Stopped libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope.\n-- Subject: Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-2bba312ac73f9a5e5631d02b48504796fac44b726a2a4f5239f924dc71bcce07.scope has finished shutting down.\nJul 12 12:37:30 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice.\n-- Subject: Unit machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice has finished shutting down.\nJul 12 12:37:30 managed-node2 systemd[1]: machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice: Consumed 212ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd.slice completed and consumed the indicated resources.\nJul 12 12:37:30 managed-node2 podman[29576]: Pods stopped:\nJul 12 12:37:30 managed-node2 podman[29576]: a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd\nJul 12 12:37:30 managed-node2 podman[29576]: Pods removed:\nJul 12 12:37:30 managed-node2 podman[29576]: a247d85c3822f35fed5177ef0a98313077301f923b4a98ddd71e72c2a867eecd\nJul 12 12:37:30 managed-node2 podman[29576]: Secrets removed:\nJul 12 12:37:30 managed-node2 podman[29576]: Volumes removed:\nJul 12 12:37:30 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice.\n-- Subject: Unit machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:30 managed-node2 systemd[1]: Started libcontainer container af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.\n-- Subject: Unit libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope has finished starting 
up.\n-- \n-- The start-up result is done.\nJul 12 12:37:30 managed-node2 systemd-udevd[29733]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.\nJul 12 12:37:30 managed-node2 systemd-udevd[29733]: Could not generate persistent MAC address for vethec9deee2: No such file or directory\nJul 12 12:37:30 managed-node2 NetworkManager[666]: <info>  [1752338250.7637] manager: (vethec9deee2): new Veth device (/org/freedesktop/NetworkManager/Devices/5)\nJul 12 12:37:30 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vethec9deee2: link is not ready\nJul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered blocking state\nJul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered disabled state\nJul 12 12:37:30 managed-node2 kernel: device vethec9deee2 entered promiscuous mode\nJul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered blocking state\nJul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered forwarding state\nJul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered disabled state\nJul 12 12:37:30 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethec9deee2: link becomes ready\nJul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered blocking state\nJul 12 12:37:30 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered forwarding state\nJul 12 12:37:30 managed-node2 NetworkManager[666]: <info>  [1752338250.7897] device (vethec9deee2): carrier: link connected\nJul 12 12:37:30 managed-node2 NetworkManager[666]: <info>  [1752338250.7911] device (cni-podman1): carrier: link connected\nJul 12 12:37:30 managed-node2 dnsmasq[29803]: listening on cni-podman1(#3): 10.89.0.1\nJul 12 12:37:30 managed-node2 dnsmasq[29803]: listening on cni-podman1(#3): fe80::7c63:2ff:feee:ed5c%cni-podman1\nJul 12 12:37:30 managed-node2 dnsmasq[29808]: started, version 2.79 cachesize 150\nJul 12 12:37:30 managed-node2 dnsmasq[29808]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify\nJul 12 12:37:30 managed-node2 dnsmasq[29808]: using local addresses only for domain dns.podman\nJul 12 12:37:30 managed-node2 dnsmasq[29808]: reading /etc/resolv.conf\nJul 12 12:37:30 managed-node2 dnsmasq[29808]: using local addresses only for domain dns.podman\nJul 12 12:37:30 managed-node2 dnsmasq[29808]: using nameserver 10.29.169.13#53\nJul 12 12:37:30 managed-node2 dnsmasq[29808]: using nameserver 10.29.170.12#53\nJul 12 12:37:30 managed-node2 dnsmasq[29808]: using nameserver 10.2.32.1#53\nJul 12 12:37:30 managed-node2 dnsmasq[29808]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 12 12:37:30 managed-node2 systemd[1]: Started libcontainer container 39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.\n-- Subject: Unit libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:31 managed-node2 systemd[1]: Started libcontainer container fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.\n-- Subject: Unit libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope has finished start-up\n-- Defined-By: systemd\n-- Support: 
https://access.redhat.com/support\n-- \n-- Unit libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:31 managed-node2 podman[29576]: Pod:\nJul 12 12:37:31 managed-node2 podman[29576]: ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5\nJul 12 12:37:31 managed-node2 podman[29576]: Container:\nJul 12 12:37:31 managed-node2 podman[29576]: fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149\nJul 12 12:37:31 managed-node2 systemd[1]: Started A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:31 managed-node2 platform-python[29974]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:37:33 managed-node2 platform-python[30107]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:34 managed-node2 platform-python[30231]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:37:34 managed-node2 platform-python[30354]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:37:36 managed-node2 platform-python[30643]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:37:36 managed-node2 platform-python[30766]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:37:37 managed-node2 platform-python[30889]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True 
get_attributes=True\nJul 12 12:37:37 managed-node2 platform-python[30988]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752338256.766105-16794-202828239900759/source _original_basename=tmpvj89f27p follow=False checksum=92197531821af6a866eb3c8d736aa33d00262127 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None\nJul 12 12:37:37 managed-node2 platform-python[31113]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:37:37 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice.\n-- Subject: Unit machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:38 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 12 12:37:38 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vetha724e550: link is not ready\nJul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered blocking state\nJul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered disabled state\nJul 12 12:37:38 managed-node2 kernel: device vetha724e550 entered promiscuous mode\nJul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered blocking state\nJul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered forwarding state\nJul 12 12:37:38 managed-node2 NetworkManager[666]: <info>  [1752338258.0378] manager: (vetha724e550): new Veth device (/org/freedesktop/NetworkManager/Devices/6)\nJul 12 12:37:38 managed-node2 systemd-udevd[31161]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.\nJul 12 12:37:38 managed-node2 systemd-udevd[31161]: Could not generate persistent MAC address for vetha724e550: No such file or directory\nJul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered disabled state\nJul 12 12:37:38 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready\nJul 12 12:37:38 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready\nJul 12 12:37:38 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vetha724e550: link becomes ready\nJul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered blocking state\nJul 12 12:37:38 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered
forwarding state\nJul 12 12:37:38 managed-node2 NetworkManager[666]: <info>  [1752338258.0795] device (vetha724e550): carrier: link connected\nJul 12 12:37:38 managed-node2 dnsmasq[29808]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 2 addresses\nJul 12 12:37:38 managed-node2 systemd[1]: Started libpod-conmon-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope.\n-- Subject: Unit libpod-conmon-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:38 managed-node2 systemd[1]: Started libcontainer container 8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.\n-- Subject: Unit libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:38 managed-node2 systemd[1]: Started libpod-conmon-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope.\n-- Subject: Unit libpod-conmon-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:38 managed-node2 systemd[1]: Started libcontainer container 239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.\n-- Subject: Unit libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:38 managed-node2 platform-python[31394]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None\nJul 12 12:37:38 managed-node2 systemd[1]: Reloading.\nJul 12 12:37:39 managed-node2 platform-python[31555]: ansible-systemd Invoked with name= scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None\nJul 12 12:37:39 managed-node2 systemd[1]: Reloading.\nJul 12 12:37:40 managed-node2 platform-python[31710]: ansible-systemd Invoked with name= scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None\nJul 12 12:37:40 managed-node2 systemd[1]: Starting A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun starting up.\nJul 12 12:37:40 managed-node2 systemd[1]: libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope: Succeeded.\n-- Subject: Unit succeeded\n-- 
Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has successfully entered the 'dead' state.\nJul 12 12:37:40 managed-node2 systemd[1]: libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope: Consumed 31ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope completed and consumed the indicated resources.\nJul 12 12:37:40 managed-node2 systemd[1]: libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has successfully entered the 'dead' state.\nJul 12 12:37:40 managed-node2 systemd[1]: libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope: Consumed 32ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope completed and consumed the indicated resources.\nJul 12 12:37:40 managed-node2 dnsmasq[29808]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 12 12:37:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay-719154c260667d3aa74578747f416c045e6c4537dd0a7c671adf4544cf226e68-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-719154c260667d3aa74578747f416c045e6c4537dd0a7c671adf4544cf226e68-merged.mount has successfully entered the 'dead' state.\nJul 12 12:37:40 managed-node2 systemd[1]: libpod-conmon-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-conmon-239858a6927058e6a334d83df993295090bdb1b8fadc117ac30dd58a6d442de8.scope has successfully entered the 'dead' state.\nJul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered disabled state\nJul 12 12:37:40 managed-node2 kernel: device vetha724e550 left promiscuous mode\nJul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(vetha724e550) entered disabled state\nJul 12 12:37:40 managed-node2 systemd[1]: run-netns-netns\\x2d1bb9153f\\x2df22a\\x2dcc5d\\x2d3c7a\\x2dd87e5ee733ce.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-netns-netns\\x2d1bb9153f\\x2df22a\\x2dcc5d\\x2d3c7a\\x2dd87e5ee733ce.mount has successfully entered the 'dead' state.\nJul 12 12:37:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136-userdata-shm.mount has successfully entered the 'dead' state.\nJul 12 12:37:40 managed-node2 systemd[1]: 
var-lib-containers-storage-overlay-eb2787269c2e2cd7be423803b1667df0aa39556214229872d965cd9cab309419-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-eb2787269c2e2cd7be423803b1667df0aa39556214229872d965cd9cab309419-merged.mount has successfully entered the 'dead' state.\nJul 12 12:37:40 managed-node2 systemd[1]: libpod-conmon-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-conmon-8a60183f3d723a4d3c824e075f5b0a62c6fb264dbd1f8ac4124cb27d56e80136.scope has successfully entered the 'dead' state.\nJul 12 12:37:40 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice.\n-- Subject: Unit machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice has finished shutting down.\nJul 12 12:37:40 managed-node2 systemd[1]: machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice: Consumed 199ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583.slice completed and consumed the indicated resources.\nJul 12 12:37:40 managed-node2 podman[31717]: Pods stopped:\nJul 12 12:37:40 managed-node2 podman[31717]: 537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583\nJul 12 12:37:40 managed-node2 podman[31717]: Pods removed:\nJul 12 12:37:40 managed-node2 podman[31717]: 537ae713a68312ee725af28c808792f680dca3b2c218aa0b7af49348c455d583\nJul 12 12:37:40 managed-node2 podman[31717]: Secrets removed:\nJul 12 12:37:40 managed-node2 podman[31717]: Volumes removed:\nJul 12 12:37:40 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice.\n-- Subject: Unit machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:40 managed-node2 systemd[1]: Started libcontainer container 7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.\n-- Subject: Unit libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:40 managed-node2 NetworkManager[666]: <info>  [1752338260.9491] manager: (veth3fe74d71): new Veth device (/org/freedesktop/NetworkManager/Devices/7)\nJul 12 12:37:40 managed-node2 systemd-udevd[31882]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.\nJul 12 12:37:40 managed-node2 systemd-udevd[31882]: Could not generate 
persistent MAC address for veth3fe74d71: No such file or directory\nJul 12 12:37:40 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth3fe74d71: link is not ready\nJul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered blocking state\nJul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered disabled state\nJul 12 12:37:40 managed-node2 kernel: device veth3fe74d71 entered promiscuous mode\nJul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered blocking state\nJul 12 12:37:40 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered forwarding state\nJul 12 12:37:40 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth3fe74d71: link becomes ready\nJul 12 12:37:40 managed-node2 NetworkManager[666]: <info>  [1752338260.9931] device (veth3fe74d71): carrier: link connected\nJul 12 12:37:41 managed-node2 dnsmasq[29808]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 2 addresses\nJul 12 12:37:41 managed-node2 systemd[1]: Started libcontainer container 304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.\n-- Subject: Unit libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:41 managed-node2 systemd[1]: Started libcontainer container e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.\n-- Subject: Unit libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:41 managed-node2 systemd[1]: Started A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:41 managed-node2 podman[31717]: Pod:\nJul 12 12:37:41 managed-node2 podman[31717]: ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2\nJul 12 12:37:41 managed-node2 podman[31717]: Container:\nJul 12 12:37:41 managed-node2 podman[31717]: e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e\nJul 12 12:37:41 managed-node2 sudo[32116]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jyrknhzkjwtoyoqfhtaoymdanzpphasy ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338261.7921255-17011-231273247445257/AnsiballZ_command.py'\nJul 12 12:37:41 managed-node2 sudo[32116]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:37:42 managed-node2 platform-python[32119]: ansible-command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:42 managed-node2 systemd[25539]: Started podman-32128.scope.\n-- Subject: Unit UNIT has finished 
start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:37:42 managed-node2 sudo[32116]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:37:42 managed-node2 platform-python[32258]: ansible-command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:42 managed-node2 platform-python[32389]: ansible-command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:43 managed-node2 sudo[32528]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lhqlhpwddcodyczhbsyjvspptskrqirm ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338263.154581-17086-94452808741655/AnsiballZ_command.py'\nJul 12 12:37:43 managed-node2 sudo[32528]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:37:43 managed-node2 platform-python[32531]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:43 managed-node2 sudo[32528]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:37:43 managed-node2 platform-python[32657]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:44 managed-node2 platform-python[32783]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:44 managed-node2 platform-python[32909]: ansible-uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:37:45 managed-node2 platform-python[33033]: ansible-uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None 
directory_mode=None\nJul 12 12:37:45 managed-node2 rsyslogd[1025]: imjournal: journal files changed, reloading... [v8.2102.0-15.el8 try https://www.rsyslog.com/e/0 ]\nJul 12 12:37:45 managed-node2 platform-python[33158]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr__qviri30_podman/httpd1-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:45 managed-node2 platform-python[33282]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr__qviri30_podman/httpd2-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:46 managed-node2 platform-python[33406]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr__qviri30_podman/httpd3-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:49 managed-node2 platform-python[33655]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:37:50 managed-node2 platform-python[33784]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:37:53 managed-node2 platform-python[33909]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:37:56 managed-node2 platform-python[34032]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 12 12:37:56 managed-node2 platform-python[34159]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 12 12:37:57 managed-node2 platform-python[34286]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 12 12:37:59 managed-node2 platform-python[34409]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 
conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:38:02 managed-node2 platform-python[34532]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:38:05 managed-node2 platform-python[34655]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:38:08 managed-node2 platform-python[34778]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 12 12:38:10 managed-node2 platform-python[34939]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 12 12:38:10 managed-node2 platform-python[35062]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 12 12:38:15 managed-node2 platform-python[35185]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 12 12:38:15 managed-node2 platform-python[35309]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:16 managed-node2 platform-python[35434]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:16 managed-node2 platform-python[35558]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:17 managed-node2 platform-python[35682]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:18 managed-node2 platform-python[35806]: ansible-command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None\nJul 12 12:38:19 managed-node2 platform-python[35929]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None 
access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:19 managed-node2 platform-python[36052]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:19 managed-node2 sudo[36175]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sfjnrnyknupgcycrjkhhnhuswecfqpyf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338299.776674-18742-47644857358508/AnsiballZ_podman_image.py'\nJul 12 12:38:19 managed-node2 sudo[36175]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36180.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36189.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36197.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36205.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36213.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:38:20 managed-node2 systemd[25539]: Started podman-36222.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:38:20 managed-node2 sudo[36175]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:38:21 managed-node2 platform-python[36351]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:21 managed-node2 platform-python[36476]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S 
unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:22 managed-node2 platform-python[36599]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 12 12:38:22 managed-node2 platform-python[36663]: ansible-file Invoked with owner=podman_basic_user group=3001 mode=0644 dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _original_basename=tmpxhmslwri recurse=False state=file path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:22 managed-node2 sudo[36786]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kddzobyvwijhudrubugwpxpljmgfafhb ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338302.4767652-18857-261073031296101/AnsiballZ_podman_play.py'\nJul 12 12:38:22 managed-node2 sudo[36786]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:38:22 managed-node2 platform-python[36789]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:38:22 managed-node2 systemd[25539]: Started podman-36797.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:38:22 managed-node2 platform-python[36789]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 12 12:38:22 managed-node2 platform-python[36789]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: \nJul 12 12:38:22 managed-node2 platform-python[36789]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-12T12:38:22-04:00\" level=info msg=\"/bin/podman filtering at log level debug\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-12T12:38:22-04:00\" level=info msg=\"Using sqlite as database backend\"\n 
time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Initializing event backend file\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\n time=\"2025-07-12T12:38:22-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-12 12:37:16.97600692 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers 
storage\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\"\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566)\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:948a5af191001d7e33941a1345a44d1614f9cb020baf6f0929c28056fca3e566\\\"\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Got pod cgroup as /libpod_parent/36ca61264e7e11a7ce277e40b51ec55a9afdcde0d1c0d8549c5c14e962eb5314\"\n Error: adding pod to state: name \"httpd1\" is in use: pod already exists\n time=\"2025-07-12T12:38:22-04:00\" level=debug msg=\"Shutting down engines\"\nJul 12 12:38:22 managed-node2 platform-python[36789]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125\nJul 12 12:38:22 managed-node2 sudo[36786]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:38:23 managed-node2 platform-python[36952]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 12 12:38:24 managed-node2 platform-python[37076]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:25 managed-node2 platform-python[37201]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:26 managed-node2 platform-python[37325]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:27 managed-node2 platform-python[37448]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None 
directory_mode=None\nJul 12 12:38:28 managed-node2 platform-python[37737]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:28 managed-node2 platform-python[37862]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:29 managed-node2 platform-python[37985]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 12 12:38:29 managed-node2 platform-python[38049]: ansible-file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd2.yml _original_basename=tmpl5_fx80_ recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd2.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:29 managed-node2 platform-python[38172]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:38:29 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice.\n-- Subject: Unit machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:38:29 managed-node2 platform-python[38172]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 12 12:38:29 managed-node2 platform-python[38172]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: \nJul 12 12:38:29 managed-node2 platform-python[38172]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-12T12:38:29-04:00\" level=info msg=\"/usr/bin/podman filtering at log level debug\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)\"\n 
time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-12T12:38:29-04:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\n time=\"2025-07-12T12:38:29-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Initializing event backend file\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\n time=\"2025-07-12T12:38:29-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-12 12:34:58.774465298 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false 
false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\\\"\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2)\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:4ce5ee3a59da1259483b763f184f2044353762b4f6ba8efda812d2addc268cb2\\\"\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice for parent machine.slice and name libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice\"\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_ddf2b8910174a561d4b778fdee1ba22da1f70c708c37abf8b71fceed46309312.slice\"\n Error: adding pod to state: name \"httpd2\" is in use: pod already exists\n time=\"2025-07-12T12:38:29-04:00\" level=debug msg=\"Shutting down engines\"\nJul 12 12:38:29 managed-node2 platform-python[38172]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125\nJul 12 12:38:31 managed-node2 platform-python[38333]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:32 managed-node2 platform-python[38458]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:33 managed-node2 platform-python[38582]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:33 
managed-node2 platform-python[38705]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:35 managed-node2 platform-python[38995]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:35 managed-node2 platform-python[39120]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:35 managed-node2 platform-python[39243]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 12 12:38:36 managed-node2 platform-python[39307]: ansible-file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd3.yml _original_basename=tmpb1ttu3ws recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd3.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:36 managed-node2 platform-python[39430]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:38:36 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_76dbf1d1588fd5e46783829b7ad9b35366a8e6eaea8987a62e2fa617ab2d64d4.slice.\n-- Subject: Unit machine-libpod_pod_76dbf1d1588fd5e46783829b7ad9b35366a8e6eaea8987a62e2fa617ab2d64d4.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_76dbf1d1588fd5e46783829b7ad9b35366a8e6eaea8987a62e2fa617ab2d64d4.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:38:37 managed-node2 sudo[39591]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-reodnpkicydeipvtrpezylgtxbcjdhgz ; /usr/libexec/platform-python 
/var/tmp/ansible-tmp-1752338317.3985052-19629-279558676694792/AnsiballZ_command.py'\nJul 12 12:38:37 managed-node2 sudo[39591]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:38:37 managed-node2 platform-python[39594]: ansible-command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:37 managed-node2 systemd[25539]: Started podman-39603.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:38:37 managed-node2 sudo[39591]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:38:38 managed-node2 platform-python[39733]: ansible-command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:38 managed-node2 platform-python[39864]: ansible-command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:38 managed-node2 sudo[39995]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jdxtrtiiowdglcaeyhyrkpgebggwzera ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338318.6450086-19659-70124315420202/AnsiballZ_command.py'\nJul 12 12:38:38 managed-node2 sudo[39995]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:38:38 managed-node2 platform-python[39998]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:38 managed-node2 sudo[39995]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:38:39 managed-node2 platform-python[40124]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:39 managed-node2 platform-python[40250]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:40 managed-node2 platform-python[40376]: ansible-uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:40 managed-node2 platform-python[40500]: ansible-uri Invoked with 
url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:40 managed-node2 platform-python[40624]: ansible-uri Invoked with url=http://localhost:15003/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:43 managed-node2 platform-python[40873]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:44 managed-node2 platform-python[41002]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:47 managed-node2 platform-python[41127]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 12 12:38:48 managed-node2 platform-python[41251]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:48 managed-node2 platform-python[41376]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:49 managed-node2 platform-python[41500]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:50 managed-node2 platform-python[41624]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:50 managed-node2 platform-python[41748]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:51 managed-node2 sudo[41873]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-phdckzktiusimljvxxeqcswlbkptcgje ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338330.9997387-20292-117543446474536/AnsiballZ_systemd.py'\nJul 12 12:38:51 managed-node2 sudo[41873]: 
pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:38:51 managed-node2 platform-python[41876]: ansible-systemd Invoked with name= scope=user state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 12 12:38:51 managed-node2 systemd[25539]: Reloading.\nJul 12 12:38:51 managed-node2 systemd[25539]: Stopping A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit UNIT has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has begun shutting down.\nJul 12 12:38:51 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered disabled state\nJul 12 12:38:51 managed-node2 kernel: device veth73ffc199 left promiscuous mode\nJul 12 12:38:51 managed-node2 kernel: cni-podman1: port 1(veth73ffc199) entered disabled state\nJul 12 12:38:51 managed-node2 podman[41892]: Pods stopped:\nJul 12 12:38:51 managed-node2 podman[41892]: e6000b000d16f866b55a1a19dd3fb3262c82c732a816136db57b7e8368f0a32a\nJul 12 12:38:51 managed-node2 podman[41892]: Pods removed:\nJul 12 12:38:51 managed-node2 podman[41892]: e6000b000d16f866b55a1a19dd3fb3262c82c732a816136db57b7e8368f0a32a\nJul 12 12:38:51 managed-node2 podman[41892]: Secrets removed:\nJul 12 12:38:51 managed-node2 podman[41892]: Volumes removed:\nJul 12 12:38:51 managed-node2 systemd[25539]: Stopped A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:38:51 managed-node2 sudo[41873]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:38:52 managed-node2 platform-python[42165]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:52 managed-node2 sudo[42290]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nazeochktfswzfvlptenlckqnldzbmyv ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338332.4280062-20367-151061681885350/AnsiballZ_podman_play.py'\nJul 12 12:38:52 managed-node2 sudo[42290]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 12 12:38:52 managed-node2 systemd[25539]: Started podman-42301.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is 
done.\nJul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman kube play --down /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped:\n Pods removed:\n Secrets removed:\n Volumes removed:\nJul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: \nJul 12 12:38:52 managed-node2 platform-python[42293]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 12 12:38:52 managed-node2 sudo[42290]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:38:53 managed-node2 platform-python[42430]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:38:54 managed-node2 platform-python[42553]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 12 12:38:54 managed-node2 platform-python[42677]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:56 managed-node2 platform-python[42802]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:38:56 managed-node2 platform-python[42926]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 12 12:38:56 managed-node2 systemd[1]: Reloading.\nJul 12 12:38:57 managed-node2 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun shutting down.\nJul 12 12:38:57 managed-node2 systemd[1]: libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope has successfully entered the 'dead' state.\nJul 12 12:38:57 managed-node2 systemd[1]: libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope: Consumed 32ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a.scope completed and consumed the indicated resources.\nJul 12 12:38:57 managed-node2 systemd[1]: libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope: Succeeded.\n-- 
Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope has successfully entered the 'dead' state.\nJul 12 12:38:57 managed-node2 systemd[1]: libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope: Consumed 34ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-fc69bc9fefcf9e53c10eb3db0832c9845398df44e65e259f53b391a0a83be149.scope completed and consumed the indicated resources.\nJul 12 12:38:57 managed-node2 dnsmasq[29808]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 12 12:38:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay-ad05b883b876cb925ec05b9fafaf9a8a37fd48a25d5d54b9615f3f4cdf0bd3b3-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-ad05b883b876cb925ec05b9fafaf9a8a37fd48a25d5d54b9615f3f4cdf0bd3b3-merged.mount has successfully entered the 'dead' state.\nJul 12 12:38:57 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered disabled state\nJul 12 12:38:57 managed-node2 kernel: device vethec9deee2 left promiscuous mode\nJul 12 12:38:57 managed-node2 kernel: cni-podman1: port 1(vethec9deee2) entered disabled state\nJul 12 12:38:57 managed-node2 systemd[1]: run-netns-netns\\x2d52414ca9\\x2df342\\x2dd1f3\\x2d8cce\\x2d232fb04744c1.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-netns-netns\\x2d52414ca9\\x2df342\\x2dd1f3\\x2d8cce\\x2d232fb04744c1.mount has successfully entered the 'dead' state.\nJul 12 12:38:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-39ecccf0916e234b3c00482d1ee8ceaec0f66add1787b0423c79f4c596b7e86a-userdata-shm.mount has successfully entered the 'dead' state.\nJul 12 12:38:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay-2d63d07bf8161ced4731534605fa38c1618204d50fc3a412c2eb303e296f3b5e-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-2d63d07bf8161ced4731534605fa38c1618204d50fc3a412c2eb303e296f3b5e-merged.mount has successfully entered the 'dead' state.\nJul 12 12:38:57 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice.\n-- Subject: Unit machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice has finished shutting down.\nJul 12 12:38:57 managed-node2 systemd[1]: machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice: Consumed 67ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: 
https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5.slice completed and consumed the indicated resources.\nJul 12 12:38:57 managed-node2 podman[42962]: Pods stopped:\nJul 12 12:38:57 managed-node2 podman[42962]: ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5\nJul 12 12:38:57 managed-node2 podman[42962]: Pods removed:\nJul 12 12:38:57 managed-node2 podman[42962]: ef90747497e95de730b82c4fe983c4414efe300eee5a4657597aa70a039709f5\nJul 12 12:38:57 managed-node2 podman[42962]: Secrets removed:\nJul 12 12:38:57 managed-node2 podman[42962]: Volumes removed:\nJul 12 12:38:57 managed-node2 systemd[1]: libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope has successfully entered the 'dead' state.\nJul 12 12:38:57 managed-node2 systemd[1]: libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope: Consumed 34ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315.scope completed and consumed the indicated resources.\nJul 12 12:38:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-af093d2b9934d39e77095e3e9c2064847e909d7955ee03e30e835bf99ee05315-userdata-shm.mount has successfully entered the 'dead' state.\nJul 12 12:38:57 managed-node2 dnsmasq[29808]: exiting on receipt of SIGTERM\nJul 12 12:38:57 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has successfully entered the 'dead' state.\nJul 12 12:38:57 managed-node2 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished shutting down.\nJul 12 12:38:58 managed-node2 platform-python[43238]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:38:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay-495aa6291e9f835076198c3e1c7b8cf1909ca8b5400bdf0e5a851ba0c44119c1-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-495aa6291e9f835076198c3e1c7b8cf1909ca8b5400bdf0e5a851ba0c44119c1-merged.mount has successfully entered the 'dead' state.\nJul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play Invoked with 
state=absent debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman kube play --down /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped:\n Pods removed:\n Secrets removed:\n Volumes removed:\nJul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: \nJul 12 12:38:58 managed-node2 platform-python[43363]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 12 12:38:58 managed-node2 platform-python[43499]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:39:00 managed-node2 platform-python[43622]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:01 managed-node2 platform-python[43747]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:02 managed-node2 platform-python[43871]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 12 12:39:02 managed-node2 systemd[1]: Reloading.\nJul 12 12:39:02 managed-node2 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun shutting down.\nJul 12 12:39:02 managed-node2 systemd[1]: libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope has successfully entered the 'dead' state.\nJul 12 12:39:02 managed-node2 systemd[1]: libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope: Consumed 33ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: 
systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70.scope completed and consumed the indicated resources.\nJul 12 12:39:02 managed-node2 systemd[1]: libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope has successfully entered the 'dead' state.\nJul 12 12:39:02 managed-node2 systemd[1]: libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope: Consumed 33ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-e32e763a06601c4335532ab9a6ad607786ddff3cd26373bf5cb50e0515b1874e.scope completed and consumed the indicated resources.\nJul 12 12:39:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay-9f3109ed9592a16625c27d2daaac765746798fb973c8fcb3160951dbc3c83474-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-9f3109ed9592a16625c27d2daaac765746798fb973c8fcb3160951dbc3c83474-merged.mount has successfully entered the 'dead' state.\nJul 12 12:39:02 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered disabled state\nJul 12 12:39:02 managed-node2 kernel: device veth3fe74d71 left promiscuous mode\nJul 12 12:39:02 managed-node2 kernel: cni-podman1: port 2(veth3fe74d71) entered disabled state\nJul 12 12:39:02 managed-node2 systemd[1]: run-netns-netns\\x2dda1f9efe\\x2d2607\\x2d2465\\x2d3389\\x2d63a80a061169.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-netns-netns\\x2dda1f9efe\\x2d2607\\x2d2465\\x2d3389\\x2d63a80a061169.mount has successfully entered the 'dead' state.\nJul 12 12:39:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-304f3418ec23f8181252948035f4fc6aaf8d1a2f8b8005c5e6a870299c6b6a70-userdata-shm.mount has successfully entered the 'dead' state.\nJul 12 12:39:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay-b047a8f535e44a79e89943c24ecd0f40472ad6c74487b61c695a5612de0f66e9-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-b047a8f535e44a79e89943c24ecd0f40472ad6c74487b61c695a5612de0f66e9-merged.mount has successfully entered the 'dead' state.\nJul 12 12:39:02 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice.\n-- Subject: Unit machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice has finished shutting down.\nJul 12 12:39:02 managed-node2 systemd[1]: 
machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice: Consumed 66ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2.slice completed and consumed the indicated resources.\nJul 12 12:39:02 managed-node2 podman[43907]: Pods stopped:\nJul 12 12:39:02 managed-node2 podman[43907]: ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2\nJul 12 12:39:02 managed-node2 podman[43907]: Pods removed:\nJul 12 12:39:02 managed-node2 podman[43907]: ad346e614cdd59ff13800f29379b2b28b22fd9c3218f86832de07f6a6a244fd2\nJul 12 12:39:02 managed-node2 podman[43907]: Secrets removed:\nJul 12 12:39:02 managed-node2 podman[43907]: Volumes removed:\nJul 12 12:39:02 managed-node2 systemd[1]: libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope has successfully entered the 'dead' state.\nJul 12 12:39:02 managed-node2 systemd[1]: libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope: Consumed 36ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b.scope completed and consumed the indicated resources.\nJul 12 12:39:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-7884a8a9b17bef7273599c60ded3757456bf19cb88d6a2ff7ef33adab37d754b-userdata-shm.mount has successfully entered the 'dead' state.\nJul 12 12:39:02 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has successfully entered the 'dead' state.\nJul 12 12:39:02 managed-node2 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished shutting down.\nJul 12 12:39:03 managed-node2 platform-python[44179]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay-a2ca6c0802e64aa881912046899069f9906c3a3ecb7fd7f0e60445f767b453fb-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-a2ca6c0802e64aa881912046899069f9906c3a3ecb7fd7f0e60445f767b453fb-merged.mount has successfully entered the 
'dead' state.\nJul 12 12:39:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:39:03 managed-node2 platform-python[44304]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 12 12:39:03 managed-node2 platform-python[44304]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/httpd3.yml\nJul 12 12:39:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:39:04 managed-node2 platform-python[44440]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:39:05 managed-node2 platform-python[44563]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None\nJul 12 12:39:05 managed-node2 platform-python[44687]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:05 managed-node2 sudo[44812]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-efsiwiyrgguftoqfmdsvrczsjrcdxihg ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338345.768981-21015-270850189165831/AnsiballZ_podman_container_info.py'\nJul 12 12:39:05 managed-node2 sudo[44812]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:39:06 managed-node2 platform-python[44815]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None\nJul 12 12:39:06 managed-node2 systemd[25539]: Started podman-44817.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:39:06 managed-node2 sudo[44812]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:39:06 managed-node2 sudo[44946]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lgxpwgdjqpsoqirugaueifldgtghyuxf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python 
/var/tmp/ansible-tmp-1752338346.3029222-21038-175701710527734/AnsiballZ_command.py'\nJul 12 12:39:06 managed-node2 sudo[44946]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:39:06 managed-node2 platform-python[44949]: ansible-command Invoked with _raw_params=podman network ls -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:06 managed-node2 systemd[25539]: Started podman-44951.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:39:06 managed-node2 sudo[44946]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:39:06 managed-node2 sudo[45105]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-plsfjelikobxnwisunpzotpprpzjinoh ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338346.7950253-21068-8970032815672/AnsiballZ_command.py'\nJul 12 12:39:06 managed-node2 sudo[45105]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:39:07 managed-node2 platform-python[45108]: ansible-command Invoked with _raw_params=podman secret ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:07 managed-node2 systemd[25539]: Started podman-45110.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:39:07 managed-node2 sudo[45105]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:39:07 managed-node2 platform-python[45239]: ansible-command Invoked with removes=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl disable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None\nJul 12 12:39:07 managed-node2 systemd[1]: Stopping User Manager for UID 3001...\n-- Subject: Unit user@3001.service has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user@3001.service has begun shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Stopping podman-pause-5a039c99.scope.\n-- Subject: Unit UNIT has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has begun shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Stopped target Default.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Removed slice podman\\x2dkube.slice.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Stopping D-Bus User Message Bus...\n-- Subject: Unit UNIT has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has begun shutting down.\nJul 12 12:39:07 managed-node2 
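
For reference, the teardown sequence recorded above is driven by two modules the journal names explicitly: containers.podman.podman_play with state=absent (which runs podman kube play --down on the stored YAML) and the systemd module stopping the podman-kube@ template unit. A minimal task sketch using only values visible in this log (the paths and the systemd-escaped unit name are from this run):

  - name: Tear down a kube-play workload
    containers.podman.podman_play:
      kube_file: /etc/containers/ansible-kubernetes.d/httpd2.yml
      state: absent

  # The unit name is the systemd-escaped kube file path, exactly as it
  # appears in the journal entries above ('-' escaped as \x2d).
  - name: Stop and disable the matching podman-kube template unit
    ansible.builtin.systemd:
      name: "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service"
      state: stopped
      enabled: false
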
systemd[25539]: Stopped D-Bus User Message Bus.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Stopped target Basic System.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Stopped target Timers.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Stopped Mark boot as successful after the user session has run 2 minutes.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Stopped target Paths.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Stopped target Sockets.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Closed D-Bus User Message Bus Socket.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Stopped podman-pause-5a039c99.scope.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Removed slice user.slice.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[25539]: Reached target Shutdown.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:39:07 managed-node2 systemd[25539]: Started Exit the Session.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:39:07 managed-node2 systemd[25539]: Reached target Exit the Session.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:39:07 managed-node2 systemd[25545]: pam_unix(systemd-user:session): session closed for user podman_basic_user\nJul 12 12:39:07 managed-node2 systemd[1]: user@3001.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit user@3001.service has successfully entered the 'dead' state.\nJul 12 12:39:07 managed-node2 systemd[1]: Stopped User Manager for UID 3001.\n-- Subject: Unit user@3001.service has finished shutting down\n-- 
Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user@3001.service has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[1]: Stopping User runtime directory /run/user/3001...\n-- Subject: Unit user-runtime-dir@3001.service has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-runtime-dir@3001.service has begun shutting down.\nJul 12 12:39:07 managed-node2 systemd[1]: run-user-3001.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-user-3001.mount has successfully entered the 'dead' state.\nJul 12 12:39:07 managed-node2 systemd[1]: user-runtime-dir@3001.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit user-runtime-dir@3001.service has successfully entered the 'dead' state.\nJul 12 12:39:07 managed-node2 systemd[1]: Stopped User runtime directory /run/user/3001.\n-- Subject: Unit user-runtime-dir@3001.service has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-runtime-dir@3001.service has finished shutting down.\nJul 12 12:39:07 managed-node2 systemd[1]: Removed slice User Slice of UID 3001.\n-- Subject: Unit user-3001.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-3001.slice has finished shutting down.\nJul 12 12:39:07 managed-node2 platform-python[45371]: ansible-command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:08 managed-node2 sudo[45495]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gazcsbdiijzpmpohefmybwwgcnpxuufr ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338348.4017277-21172-132611654922840/AnsiballZ_command.py'\nJul 12 12:39:08 managed-node2 sudo[45495]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 12 12:39:08 managed-node2 platform-python[45498]: ansible-command Invoked with _raw_params=podman pod exists httpd1 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:08 managed-node2 sudo[45495]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:39:09 managed-node2 platform-python[45628]: ansible-command Invoked with _raw_params=podman pod exists httpd2 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:09 managed-node2 platform-python[45758]: ansible-command Invoked with _raw_params=podman pod exists httpd3 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:09 managed-node2 sudo[45888]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pizwdchaqbkhharmotzkhmtxjzrasqsn ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752338349.5796022-21223-271577239366846/AnsiballZ_command.py'\nJul 12 12:39:09 managed-node2 sudo[45888]: pam_unix(sudo:session): 
session opened for user podman_basic_user by root(uid=0)\nJul 12 12:39:09 managed-node2 platform-python[45891]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:09 managed-node2 sudo[45888]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 12 12:39:10 managed-node2 platform-python[46017]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:10 managed-node2 platform-python[46143]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:10 managed-node2 platform-python[46269]: ansible-stat Invoked with path=/var/lib/systemd/linger/podman_basic_user follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:13 managed-node2 platform-python[46517]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:14 managed-node2 platform-python[46646]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 12 12:39:15 managed-node2 platform-python[46770]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:17 managed-node2 platform-python[46895]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 12 12:39:18 managed-node2 platform-python[47019]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:18 managed-node2 platform-python[47144]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:19 managed-node2 platform-python[47268]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:20 managed-node2 platform-python[47392]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:20 managed-node2 platform-python[47516]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:21 managed-node2 platform-python[47639]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:21 managed-node2 platform-python[47762]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False 
force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:39:23 managed-node2 platform-python[47885]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 12 12:39:23 managed-node2 platform-python[48009]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:24 managed-node2 platform-python[48134]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:25 managed-node2 platform-python[48258]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 12 12:39:26 managed-node2 platform-python[48385]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:26 managed-node2 platform-python[48508]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:39:27 managed-node2 platform-python[48631]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:29 managed-node2 platform-python[48756]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:29 managed-node2 platform-python[48880]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 12 12:39:30 managed-node2 platform-python[49007]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:30 managed-node2 platform-python[49130]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:39:31 managed-node2 
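
Two details of the rootless cleanup above are easy to miss: lingering is cancelled with loginctl disable-linger (guarded by removes= so it only runs while the linger file exists, which is what triggers the user@3001.service shutdown seen in the journal), and pod absence is then verified with podman pod exists, which exits 0 when the pod is present and 1 when it is not. A sketch of equivalent tasks; the changed_when/failed_when handling is illustrative, not taken from this run:

  - name: Cancel lingering for the rootless user (command as logged)
    ansible.builtin.command:
      cmd: loginctl disable-linger podman_basic_user
      removes: /var/lib/systemd/linger/podman_basic_user

  # podman pod exists reports presence only via its exit code
  - name: Assert the httpd1 pod is gone
    ansible.builtin.command: podman pod exists httpd1
    register: __pod_check
    changed_when: false
    failed_when: __pod_check.rc == 0
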
platform-python[49253]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None\nJul 12 12:39:32 managed-node2 platform-python[49377]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:32 managed-node2 platform-python[49500]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:39:33 managed-node2 platform-python[49623]: ansible-file Invoked with path=/tmp/lsr__qviri30_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:39:36 managed-node2 platform-python[49785]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d\nJul 12 12:39:36 managed-node2 platform-python[49912]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:36 managed-node2 platform-python[50035]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:39 managed-node2 platform-python[50283]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:40 managed-node2 platform-python[50412]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 12 12:39:41 managed-node2 platform-python[50536]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:43 managed-node2 platform-python[50700]: ansible-setup Invoked with gather_subset=['all'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d\nJul 12 12:39:46 managed-node2 platform-python[50852]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:47 managed-node2 platform-python[50975]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:49 managed-node2 platform-python[51223]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None 
stdin=None\nJul 12 12:39:50 managed-node2 platform-python[51352]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 12 12:39:50 managed-node2 platform-python[51476]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:56 managed-node2 platform-python[51640]: ansible-setup Invoked with gather_subset=['all'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d\nJul 12 12:39:57 managed-node2 platform-python[51792]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:39:57 managed-node2 platform-python[51915]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:39:58 managed-node2 platform-python[52039]: ansible-dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:40:02 managed-node2 platform-python[52167]: ansible-dnf Invoked with name=['certmonger'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:40:04 managed-node2 dbus-daemon[601]: [system] Reloaded configuration\nJul 12 12:40:04 managed-node2 dbus-daemon[601]: [system] Reloaded configuration\nJul 12 12:40:04 managed-node2 dbus-daemon[601]: [system] Reloaded configuration\nJul 12 12:40:04 managed-node2 dbus-daemon[601]: [system] Reloaded configuration\nJul 12 12:40:04 managed-node2 dbus-daemon[601]: [system] Reloaded configuration\nJul 12 12:40:04 managed-node2 systemd[1]: Reloading.\nJul 12 12:40:05 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.\n-- Subject: Unit run-rbd3345bfad0b449fb2e69833e5ca39b9.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit run-rbd3345bfad0b449fb2e69833e5ca39b9.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:40:05 managed-node2 systemd[1]: Starting man-db-cache-update.service...\n-- Subject: Unit man-db-cache-update.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit man-db-cache-update.service has begun starting up.\nJul 12 12:40:05 managed-node2 systemd[1]: Reloading.\nJul 12 12:40:05 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit man-db-cache-update.service has successfully entered the 'dead' 
state.\nJul 12 12:40:05 managed-node2 systemd[1]: Started man-db-cache-update.service.\n-- Subject: Unit man-db-cache-update.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit man-db-cache-update.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:40:05 managed-node2 systemd[1]: run-rbd3345bfad0b449fb2e69833e5ca39b9.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-rbd3345bfad0b449fb2e69833e5ca39b9.service has successfully entered the 'dead' state.\nJul 12 12:40:06 managed-node2 platform-python[52799]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:40:06 managed-node2 platform-python[52922]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:40:07 managed-node2 platform-python[53045]: ansible-systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 12 12:40:07 managed-node2 systemd[1]: Reloading.\nJul 12 12:40:07 managed-node2 systemd[1]: Starting Certificate monitoring and PKI enrollment...\n-- Subject: Unit certmonger.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit certmonger.service has begun starting up.\nJul 12 12:40:07 managed-node2 systemd[1]: Started Certificate monitoring and PKI enrollment.\n-- Subject: Unit certmonger.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit certmonger.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 12 12:40:08 managed-node2 platform-python[53238]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=#\n # Ansible managed\n #\n # system_role:certificate\n booted=True provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None\nJul 12 12:40:08 managed-node2 certmonger[53081]: 2025-07-12 12:40:08 [53081] Wrote to /var/lib/certmonger/requests/20250712164008\nJul 12 12:40:08 managed-node2 
certmonger[53081]: 2025-07-12 12:40:08 [53081] Wrote to /var/lib/certmonger/requests/20250712164008\n[... identical 'Wrote to /var/lib/certmonger/requests/20250712164008' records from certmonger[53081] repeated here; duplicates elided ...]\nJul 12 12:40:08 managed-node2 
certmonger[53254]: Certificate in file \"/etc/pki/tls/certs/quadlet_demo.crt\" issued by CA and saved.\nJul 12 12:40:08 managed-node2 certmonger[53081]: 2025-07-12 12:40:08 [53081] Wrote to /var/lib/certmonger/requests/20250712164008\nJul 12 12:40:08 managed-node2 platform-python[53376]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt\nJul 12 12:40:09 managed-node2 platform-python[53499]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key\nJul 12 12:40:09 managed-node2 platform-python[53622]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt\nJul 12 12:40:10 managed-node2 platform-python[53745]: ansible-command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:40:10 managed-node2 certmonger[53081]: 2025-07-12 12:40:10 [53081] Wrote to /var/lib/certmonger/requests/20250712164008\nJul 12 12:40:10 managed-node2 platform-python[53869]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:40:10 managed-node2 platform-python[53992]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:40:11 managed-node2 platform-python[54115]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 12 12:40:11 managed-node2 platform-python[54238]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:40:12 managed-node2 platform-python[54361]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:40:14 managed-node2 platform-python[54609]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:40:15 managed-node2 
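
To make the certificate flow concrete: the journal shows certificate_request invoked with name=quadlet_demo, dns=['localhost'] and ca=self-sign, followed during cleanup by getcert stop-tracking and removal of the issued files. A sketch reconstructed from those logged values (the role-variable shape follows the linux-system-roles certificate role documentation; treat it as illustrative):

  - name: Request the self-signed test certificate
    ansible.builtin.include_role:
      name: fedora.linux_system_roles.certificate
    vars:
      certificate_requests:
        - name: quadlet_demo
          dns: [localhost]
          ca: self-sign

  - name: Stop tracking the certificate before removing it (command as logged)
    ansible.builtin.command: getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt

  - name: Remove the issued certificate and key
    ansible.builtin.file:
      path: "{{ item }}"
      state: absent
    loop:
      - /etc/pki/tls/certs/quadlet_demo.crt
      - /etc/pki/tls/private/quadlet_demo.key
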
platform-python[54738]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 12 12:40:16 managed-node2 platform-python[54862]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:40:17 managed-node2 platform-python[54987]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:40:18 managed-node2 platform-python[55110]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:40:18 managed-node2 platform-python[55233]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:40:19 managed-node2 platform-python[55357]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:40:22 managed-node2 platform-python[55480]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 12 12:40:22 managed-node2 platform-python[55607]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 12 12:40:23 managed-node2 platform-python[55734]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 12 12:40:24 managed-node2 platform-python[55857]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 12 12:40:26 managed-node2 platform-python[55980]: ansible-command Invoked with _raw_params=journalctl -ex warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:40:26 managed-node2 platform-python[56104]: ansible-command Invoked with _raw_params=podman ps -a warn=True _uses_shell=False stdin_add_newline=True 
strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:40:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\\x2dcheck3019378336-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-metacopy\\x2dcheck3019378336-merged.mount has successfully entered the 'dead' state.\nJul 12 12:40:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 12 12:40:27 managed-node2 platform-python[56234]: ansible-command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:40:27 managed-node2 platform-python[56364]: ansible-command Invoked with _raw_params=set -euo pipefail; systemctl list-units --all | grep quadlet _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:40:28 managed-node2 platform-python[56490]: ansible-command Invoked with _raw_params=ls -alrtF /etc/systemd/system warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:40:30 managed-node2 platform-python[56739]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:40:31 managed-node2 platform-python[56868]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 12 12:40:33 managed-node2 platform-python[56993]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 12 12:40:36 managed-node2 platform-python[57116]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 12 12:40:36 managed-node2 platform-python[57243]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 12 12:40:37 managed-node2 platform-python[57370]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None 
zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 12 12:40:38 managed-node2 platform-python[57493]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 12 12:40:39 managed-node2 platform-python[57616]: ansible-command Invoked with _raw_params=exec 1>&2\n set -x\n set -o pipefail\n systemctl list-units --plain -l --all | grep quadlet || :\n systemctl list-unit-files --all | grep quadlet || :\n systemctl list-units --plain --failed -l --all | grep quadlet || :\n _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 12 12:40:40 managed-node2 platform-python[57746]: ansible-command Invoked with _raw_params=journalctl -ex warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None", "task_name": "Get journald", "task_path": "/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:209" } ]
SYSTEM ROLES ERRORS END v1

TASKS RECAP ********************************************************************
Saturday 12 July 2025 12:40:40 -0400 (0:00:00.395)       0:00:44.807 *********
===============================================================================
fedora.linux_system_roles.certificate : Ensure provider packages are installed --- 4.39s
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:15
fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed --- 3.60s
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5
fedora.linux_system_roles.firewall : Install firewalld ------------------ 2.84s
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:51
fedora.linux_system_roles.firewall : Install firewalld ------------------ 2.82s
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:51
fedora.linux_system_roles.firewall : Configure firewall ----------------- 2.37s
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:74
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.83s
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.47s
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.certificate : Slurp the contents of the files --- 1.10s
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:143
fedora.linux_system_roles.certificate : Remove files -------------------- 1.08s
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:174
fedora.linux_system_roles.firewall : Enable and start firewalld service --- 1.03s
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:30
fedora.linux_system_roles.firewall : Unmask firewalld service ----------- 1.02s
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:24
fedora.linux_system_roles.certificate : Ensure provider service is running --- 0.94s
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:76
Gathering Facts --------------------------------------------------------- 0.91s
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9
fedora.linux_system_roles.certificate : Ensure certificate requests ----- 0.81s
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:86
Debug ------------------------------------------------------------------- 0.72s
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:199
fedora.linux_system_roles.podman : Get user information ----------------- 0.53s
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
fedora.linux_system_roles.certificate : Check if system is ostree ------- 0.50s
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:10
fedora.linux_system_roles.certificate : Ensure pre-scripts hooks directory exists --- 0.49s
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:25
fedora.linux_system_roles.certificate : Run systemctl ------------------- 0.45s
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:22
Check ------------------------------------------------------------------- 0.43s
/tmp/collections-JHR/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:148
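For anyone triaging this failure by hand, the diagnostics the test collected can be replayed directly on managed-node2. The following is a minimal sketch assembled only from command invocations recorded verbatim in the journal excerpt above; it adds nothing the test did not itself run:

    # Quadlet unit state, as checked by the failing test
    systemctl list-units --plain -l --all | grep quadlet || :
    systemctl list-unit-files --all | grep quadlet || :
    systemctl list-units --plain --failed -l --all | grep quadlet || :

    # Container and pod state at the time of failure
    podman ps -a
    podman pod ps --ctr-ids --ctr-names --ctr-status

    # Full journal context, as captured by the "Get journald" task
    journalctl -ex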