ansible-playbook 2.9.27 config file = None configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules'] ansible python module location = /usr/local/lib/python3.9/site-packages/ansible executable location = /usr/local/bin/ansible-playbook python version = 3.9.19 (main, May 16 2024, 11:40:09) [GCC 8.5.0 20210514 (Red Hat 8.5.0-22)] No config file found; using defaults [WARNING]: running playbook inside collection fedora.linux_system_roles Skipping callback 'actionable', as we already have a stdout callback. Skipping callback 'counter_enabled', as we already have a stdout callback. Skipping callback 'debug', as we already have a stdout callback. Skipping callback 'dense', as we already have a stdout callback. Skipping callback 'dense', as we already have a stdout callback. Skipping callback 'full_skip', as we already have a stdout callback. Skipping callback 'json', as we already have a stdout callback. Skipping callback 'jsonl', as we already have a stdout callback. Skipping callback 'minimal', as we already have a stdout callback. Skipping callback 'null', as we already have a stdout callback. Skipping callback 'oneline', as we already have a stdout callback. Skipping callback 'selective', as we already have a stdout callback. Skipping callback 'skippy', as we already have a stdout callback. Skipping callback 'stderr', as we already have a stdout callback. Skipping callback 'unixy', as we already have a stdout callback. Skipping callback 'yaml', as we already have a stdout callback. PLAYBOOK: tests_quadlet_demo.yml *********************************************** 2 plays in /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml PLAY [all] ********************************************************************* META: ran handlers TASK [Include vault variables] ************************************************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:5 Saturday 19 July 2025 12:40:53 -0400 (0:00:00.032) 0:00:00.032 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_test_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n" }, "mysql_container_root_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n" } }, "ansible_included_var_files": [ "/tmp/podman-cZT/tests/vars/vault-variables.yml" ], "changed": false } META: ran handlers META: ran handlers PLAY [Deploy the quadlet demo app] ********************************************* TASK [Gathering Facts] ********************************************************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9 Saturday 
19 July 2025 12:40:53 -0400 (0:00:00.026) 0:00:00.059 ********* ok: [managed-node2] META: ran handlers TASK [Test is only supported on x86_64] **************************************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:38 Saturday 19 July 2025 12:40:54 -0400 (0:00:01.063) 0:00:01.123 ********* skipping: [managed-node2] => {} META: TASK [Generate certificates] *************************************************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:51 Saturday 19 July 2025 12:40:54 -0400 (0:00:00.079) 0:00:01.202 ********* TASK [fedora.linux_system_roles.certificate : Set version specific variables] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:2 Saturday 19 July 2025 12:40:54 -0400 (0:00:00.044) 0:00:01.247 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.certificate : Ensure ansible_facts used by role] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:2 Saturday 19 July 2025 12:40:54 -0400 (0:00:00.026) 0:00:01.273 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.certificate : Check if system is ostree] ******* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:10 Saturday 19 July 2025 12:40:54 -0400 (0:00:00.016) 0:00:01.289 ********* ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.certificate : Set flag to indicate system is ostree] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:15 Saturday 19 July 2025 12:40:55 -0400 (0:00:00.452) 0:00:01.742 ********* ok: [managed-node2] => { "ansible_facts": { "__certificate_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.certificate : Run systemctl] ******************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:22 Saturday 19 July 2025 12:40:55 -0400 (0:00:00.027) 0:00:01.770 ********* ok: [managed-node2] => { "changed": false, "cmd": [ "systemctl", "is-system-running" ], "delta": "0:00:00.007506", "end": "2025-07-19 12:40:55.467554", "failed_when_result": false, "rc": 0, "start": "2025-07-19 12:40:55.460048" } STDOUT: running TASK [fedora.linux_system_roles.certificate : Require installed systemd] ******* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:30 Saturday 19 July 2025 12:40:55 -0400 (0:00:00.440) 0:00:02.210 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.certificate : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:35 Saturday 19 July 2025 12:40:55 -0400 (0:00:00.018) 0:00:02.229 ********* ok: [managed-node2] => { "ansible_facts": { "__certificate_is_booted": true }, "changed": false } TASK [fedora.linux_system_roles.certificate : Set platform/version specific 
variables] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:40 Saturday 19 July 2025 12:40:55 -0400 (0:00:00.020) 0:00:02.250 ********* skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_8.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS_8.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5 Saturday 19 July 2025 12:40:55 -0400 (0:00:00.035) 0:00:02.285 ********* changed: [managed-node2] => { "changed": true, "rc": 0, "results": [ "Installed: python3-pyasn1-0.3.7-6.el8.noarch" ] } lsrpackages: python3-cryptography python3-dbus python3-pyasn1 TASK [fedora.linux_system_roles.certificate : Ensure provider packages are installed] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:15 Saturday 19 July 2025 12:40:59 -0400 (0:00:03.623) 0:00:05.908 ********* changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "rc": 0, "results": [ "Installed: xmlrpc-c-client-1.51.0-9.el8.x86_64", "Installed: xmlrpc-c-1.51.0-9.el8.x86_64", "Installed: certmonger-0.79.17-2.el8.x86_64" ] } lsrpackages: certmonger TASK [fedora.linux_system_roles.certificate : Ensure pre-scripts hooks directory exists] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:25 Saturday 19 July 2025 12:41:03 -0400 (0:00:04.396) 0:00:10.305 ********* changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/certmonger//pre-scripts", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.certificate : Ensure post-scripts hooks directory exists] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:49 Saturday 19 July 2025 12:41:04 -0400 (0:00:00.477) 0:00:10.783 ********* changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/certmonger//post-scripts", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.certificate : Ensure provider service is running] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:76 Saturday 19 July 2025 12:41:04 -0400 (0:00:00.373) 0:00:11.156 ********* changed: [managed-node2] => (item=certmonger) => { 
"__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "enabled": true, "name": "certmonger", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "basic.target systemd-journald.socket network.target dbus.service system.slice syslog.target sysinit.target dbus.socket", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedorahosted.certmonger", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Certificate monitoring and PKI enrollment", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/certmonger (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/certmonger.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "certmonger.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": 
"infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "certmonger.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PIDFile": "/run/certmonger.pid", "PartOf": "dbus.service", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target dbus.socket", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.certificate : Ensure certificate requests] ***** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:86 Saturday 19 July 2025 12:41:05 -0400 
(0:00:00.998) 0:00:12.155 ********* changed: [managed-node2] => (item={'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}) => { "ansible_loop_var": "item", "changed": true, "item": { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } } MSG: Certificate requested (new). TASK [fedora.linux_system_roles.certificate : Check if test mode is supported] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:138 Saturday 19 July 2025 12:41:06 -0400 (0:00:00.948) 0:00:13.104 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.certificate : Slurp the contents of the files] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:143 Saturday 19 July 2025 12:41:06 -0400 (0:00:00.017) 0:00:13.122 ********* ok: [managed-node2] => (item=['cert', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnakNDQW1xZ0F3SUJBZ0lRZE03MDlnblRTcFdLYzlxMmtSK2J5REFOQmdrcWhraUc5dzBCQVFzRkFEQlEKTVNBd0hnWURWUVFEREJkTWIyTmhiQ0JUYVdkdWFXNW5JRUYxZEdodmNtbDBlVEVzTUNvR0ExVUVBd3dqTnpSagpaV1kwWmpZdE1EbGtNelJoT1RVdE9HRTNNMlJoWWpZdE9URXhaamxpWXpjd0hoY05NalV3TnpFNU1UWTBNVEEyCldoY05Nall3TnpFNU1UWTBNVEExV2pBVU1SSXdFQVlEVlFRREV3bHNiMk5oYkdodmMzUXdnZ0VpTUEwR0NTcUcKU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRRFplVHBwZzRvYWFpOE0rZzNZOVROSWZiZW96V2o2OGtueAoyK05TWi9mUkZ0TGdjcHJzWVVvaG5sMThMNndoc2VaSGgxZGRZNjhjM2hDV2JSYjFDNlI0YlFraDlvU0NDZnI1CnZrRld1cDROajNXNjNPSnFNdlJtOTlwUkxHdVVmK0FKZFF0RVFQUzhuUjRhMWd3eWpsRkxlaHAvc3R0VEJ2ZWIKdVF0NktUOWJQYktVYTFBcytxc0xTdUdEeXdFK29QWU15NVBMT3VNNkhJZnJUZmFla0JVdnBjS0FKWjgwVTl2UApUTThpRUlsL1VKY0dzWXdWZGZram5SOWlGQzhpcS9vdGszeHplUXVkVDZ4Z3BjTWpiRGI1TDVabUxmc05lSWZtCkw0S3haUWhqS2lTL2ZVbzhkOE9oYlNlME9WVDlBekVPTU9LL0xNNW84WHdEeTNDZ0FVcVZBZ01CQUFHamdaTXcKZ1pBd0N3WURWUjBQQkFRREFnV2dNQlFHQTFVZEVRUU5NQXVDQ1d4dlkyRnNhRzl6ZERBZEJnTlZIU1VFRmpBVQpCZ2dyQmdFRkJRY0RBUVlJS3dZQkJRVUhBd0l3REFZRFZSMFRBUUgvQkFJd0FEQWRCZ05WSFE0RUZnUVU4d2o4CmZyMWtRU3J4aUhsM21TRm9kazR1aEd3d0h3WURWUjBqQkJnd0ZvQVVjTklaTlZYam02elRqUGhrTThrTjM1OFMKeCtZd0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFJelZLWXhVZS9NdTEvZXY1VnhCVnpybUlUL1BYdkptR3dxZgpGSEdzOU5CdzRzS053ek1lMDRyRFdnclhCeFdOTG5vcnBRUTUzZFBaZytzSkpvZm5BSkNoN1ZLaEkwcGNqL0lsCnp4OUYxMWNqam9rZ2Jka0NRRVZqeDgxQ3c1T0toK2ptOS9UVGs0SDVxRHI5VU0rL002N3BNK1M4aFNJR3VTTDEKMUNmTVV0Tis5MEIwdTNQUGw2TzlhOW1SVStKUkNpSjAyNFg5b1FtTWZ1cUFsTzlIQ1ljRUVWK0pOOHd1MTZ5cworWGpXV0ZQNVZrY3hSdUMzMGM1OU9pRVJOWHVMNmFnUUJWeGk3NXlhdDZuQysrTEpPazhnRTc3UFh3WEVVSG9pCk1NLzZ0c1QwclNacDkycmVFL05hdUFBU0d6eE9kS2tyRDJWbHk4QnZYaEdWYzZ3alZNND0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", "encoding": "base64", "item": [ "cert", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/certs/quadlet_demo.crt" } ok: [managed-node2] => (item=['key', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": 
"LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2UUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktjd2dnU2pBZ0VBQW9JQkFRRFplVHBwZzRvYWFpOE0KK2czWTlUTklmYmVveldqNjhrbngyK05TWi9mUkZ0TGdjcHJzWVVvaG5sMThMNndoc2VaSGgxZGRZNjhjM2hDVwpiUmIxQzZSNGJRa2g5b1NDQ2ZyNXZrRld1cDROajNXNjNPSnFNdlJtOTlwUkxHdVVmK0FKZFF0RVFQUzhuUjRhCjFnd3lqbEZMZWhwL3N0dFRCdmVidVF0NktUOWJQYktVYTFBcytxc0xTdUdEeXdFK29QWU15NVBMT3VNNkhJZnIKVGZhZWtCVXZwY0tBSlo4MFU5dlBUTThpRUlsL1VKY0dzWXdWZGZram5SOWlGQzhpcS9vdGszeHplUXVkVDZ4ZwpwY01qYkRiNUw1Wm1MZnNOZUlmbUw0S3haUWhqS2lTL2ZVbzhkOE9oYlNlME9WVDlBekVPTU9LL0xNNW84WHdECnkzQ2dBVXFWQWdNQkFBRUNnZ0VCQU1HTzduaGZCUm9CakNNR3BEQ0gvTDcyZGZLZDhHUDZKZXBJQlltZzA2YWIKbzlEMDhBS1hqUDFqRUYycHFKRE0zK0ttQ1JJcjJQM2VmL0VDNHozeXdNNFdiZHgycHZWK29JeUwwMmUzN1ZpdwphTjZLSGxMYlMwQWlOWU90LzRwS1RrTGNrOHlKNjhtOEpjRkk1YzVSY1BXMmxjTTJrNUxRVHEyaEF5QVZlamhmCmkxTEsvbjBqamtzaWFVV1VsM2FKVG44NGhFeER1clZhQnYyTXdvRlJHa3hMWUVGbUJJVkhSNkhxUks0Nm1zN3gKNUdZWnk4R3QwbG9wZk9XMXFGdCtYRS8vUkJGVFBYS3hiT2w0SW9VU1Z3RGJWY2ZkUHQ5VS9FWDJhSWYvZjZBOAplT3laanlNR1JYa1lBRkxPNjdjV0FOdUUwTjUrQlo5UGJwZ3pRNisxV2NFQ2dZRUE4dWgzMnlvSGJxd1JFdjlGClJlMWhOM2NlaW14bTNiWXhsbklTZ3N3YjJiVnVLOCtBdG1mK1hKWk15cUx1NmRQaE1LWHRYanE1eDNFOUcyNlkKVkJOSWd6L1A5a04waitkT0xsYjJrcHM3T3krQUQvMzBhenY5Sk5YMlhTQ2FmcmJqSEVYTVdTK01xcC9MRldVagorKzFucFllczI5dm9LR0t2ZXVIQTYxekZFc2tDZ1lFQTVUSFRoTFNUQ1F2S2pGeFFCb2N2S3ZFL3B0YUM0ckNNCmdNbTdyUStPazJOZTBFSmFGSTdOdGlwY0FUR3IrTzUzaWlmY0tRZHV5TENjaUZ0TGFWMzBQbThzcDFzT3JmSWEKUHowWEpLMUlLanZmWG9IcG1sM1IwRG9OOHlVZjI2OEVBcjBsTDJNQTJMUkNyYnNxZExGbCsremZqdmg2MVJvVQo5M3pxcXI5b2MyMENnWUIzakEyZzRWaHpFVi85MTRCQ1lBZWVncE9YVUs1M28zbFlMNkFJbTJlQmlCZEduSU1lClFxUWU4U0d1QU81UVNpVGtDbVYrSUt6YUNWTW5zeEJwSUd4cG0vZy9uUWM2NDMzSXRLZjVrMGhkWW8xR1VaZi8KUnJEWmRpd2Z2VElMOXZwaktiN2VqZFpQQ3dHcDcvTDd3K1ZreEJkQXhVUEd6SHJNb1ZxTyswTktjUUtCZ0RMMQpHQTBhUk5LdkQzS1RsNnRMNVIzTUU1WFpmYkkvQkk0aHpCdFhLU0QxNS8rdnNPMS9McGVBNVlIU2RHVGRTRENzCldoTzVObFZpaHZyK0tVOGM5NC9tRWV5SG4wVG5YaFNQVlpUdS9ldDk4bFRGMTVWZHBLZUNNTlhOZkQ3UXM3aVUKS3k5VStMdzFuOW1Xa29MdGFqcFAyUWlsa0ZIUEFiaGd5cC81L0pRbEFvR0FHS2Rpc2Jzc2FEMmpILzJlcFFtUwpiY1NXYWdBMTJzbzZlVE0rNlF1S1p2QlM0bk4zRmMwWk5QTHBZQ0NEMEkvVzlxcnVkQ29lVUNRVWdjbk5veWE1Ci8wSm9Ob1I5c0szOFZqajg3RmEvWmdLUzlIakdTVUkyNis4TEVVWmVyNEVkVEhrNXRZeFRIWkZOMzN2cDRCbUYKVEk4UzBiTk5HN2F4UFBnRlRFQWJ4OWs9Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K", "encoding": "base64", "item": [ "key", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/private/quadlet_demo.key" } ok: [managed-node2] => (item=['ca', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": 
"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnakNDQW1xZ0F3SUJBZ0lRZE03MDlnblRTcFdLYzlxMmtSK2J5REFOQmdrcWhraUc5dzBCQVFzRkFEQlEKTVNBd0hnWURWUVFEREJkTWIyTmhiQ0JUYVdkdWFXNW5JRUYxZEdodmNtbDBlVEVzTUNvR0ExVUVBd3dqTnpSagpaV1kwWmpZdE1EbGtNelJoT1RVdE9HRTNNMlJoWWpZdE9URXhaamxpWXpjd0hoY05NalV3TnpFNU1UWTBNVEEyCldoY05Nall3TnpFNU1UWTBNVEExV2pBVU1SSXdFQVlEVlFRREV3bHNiMk5oYkdodmMzUXdnZ0VpTUEwR0NTcUcKU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRRFplVHBwZzRvYWFpOE0rZzNZOVROSWZiZW96V2o2OGtueAoyK05TWi9mUkZ0TGdjcHJzWVVvaG5sMThMNndoc2VaSGgxZGRZNjhjM2hDV2JSYjFDNlI0YlFraDlvU0NDZnI1CnZrRld1cDROajNXNjNPSnFNdlJtOTlwUkxHdVVmK0FKZFF0RVFQUzhuUjRhMWd3eWpsRkxlaHAvc3R0VEJ2ZWIKdVF0NktUOWJQYktVYTFBcytxc0xTdUdEeXdFK29QWU15NVBMT3VNNkhJZnJUZmFla0JVdnBjS0FKWjgwVTl2UApUTThpRUlsL1VKY0dzWXdWZGZram5SOWlGQzhpcS9vdGszeHplUXVkVDZ4Z3BjTWpiRGI1TDVabUxmc05lSWZtCkw0S3haUWhqS2lTL2ZVbzhkOE9oYlNlME9WVDlBekVPTU9LL0xNNW84WHdEeTNDZ0FVcVZBZ01CQUFHamdaTXcKZ1pBd0N3WURWUjBQQkFRREFnV2dNQlFHQTFVZEVRUU5NQXVDQ1d4dlkyRnNhRzl6ZERBZEJnTlZIU1VFRmpBVQpCZ2dyQmdFRkJRY0RBUVlJS3dZQkJRVUhBd0l3REFZRFZSMFRBUUgvQkFJd0FEQWRCZ05WSFE0RUZnUVU4d2o4CmZyMWtRU3J4aUhsM21TRm9kazR1aEd3d0h3WURWUjBqQkJnd0ZvQVVjTklaTlZYam02elRqUGhrTThrTjM1OFMKeCtZd0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFJelZLWXhVZS9NdTEvZXY1VnhCVnpybUlUL1BYdkptR3dxZgpGSEdzOU5CdzRzS053ek1lMDRyRFdnclhCeFdOTG5vcnBRUTUzZFBaZytzSkpvZm5BSkNoN1ZLaEkwcGNqL0lsCnp4OUYxMWNqam9rZ2Jka0NRRVZqeDgxQ3c1T0toK2ptOS9UVGs0SDVxRHI5VU0rL002N3BNK1M4aFNJR3VTTDEKMUNmTVV0Tis5MEIwdTNQUGw2TzlhOW1SVStKUkNpSjAyNFg5b1FtTWZ1cUFsTzlIQ1ljRUVWK0pOOHd1MTZ5cworWGpXV0ZQNVZrY3hSdUMzMGM1OU9pRVJOWHVMNmFnUUJWeGk3NXlhdDZuQysrTEpPazhnRTc3UFh3WEVVSG9pCk1NLzZ0c1QwclNacDkycmVFL05hdUFBU0d6eE9kS2tyRDJWbHk4QnZYaEdWYzZ3alZNND0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", "encoding": "base64", "item": [ "ca", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/certs/quadlet_demo.crt" } TASK [fedora.linux_system_roles.certificate : Reset certificate_test_certs] **** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:151 Saturday 19 July 2025 12:41:07 -0400 (0:00:01.402) 0:00:14.524 ********* ok: [managed-node2] => { "ansible_facts": { "certificate_test_certs": {} }, "changed": false } TASK [fedora.linux_system_roles.certificate : Create return data] ************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:155 Saturday 19 July 2025 12:41:07 -0400 (0:00:00.020) 0:00:14.545 ********* ok: [managed-node2] => (item=quadlet_demo) => { "ansible_facts": { "certificate_test_certs": { "quadlet_demo": { "ca": "/etc/pki/tls/certs/quadlet_demo.crt", "ca_content": "-----BEGIN 
CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQdM709gnTSpWKc9q2kR+byDANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjNzRj\nZWY0ZjYtMDlkMzRhOTUtOGE3M2RhYjYtOTExZjliYzcwHhcNMjUwNzE5MTY0MTA2\nWhcNMjYwNzE5MTY0MTA1WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZeTppg4oaai8M+g3Y9TNIfbeozWj68knx\n2+NSZ/fRFtLgcprsYUohnl18L6whseZHh1ddY68c3hCWbRb1C6R4bQkh9oSCCfr5\nvkFWup4Nj3W63OJqMvRm99pRLGuUf+AJdQtEQPS8nR4a1gwyjlFLehp/sttTBveb\nuQt6KT9bPbKUa1As+qsLSuGDywE+oPYMy5PLOuM6HIfrTfaekBUvpcKAJZ80U9vP\nTM8iEIl/UJcGsYwVdfkjnR9iFC8iq/otk3xzeQudT6xgpcMjbDb5L5ZmLfsNeIfm\nL4KxZQhjKiS/fUo8d8OhbSe0OVT9AzEOMOK/LM5o8XwDy3CgAUqVAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU8wj8\nfr1kQSrxiHl3mSFodk4uhGwwHwYDVR0jBBgwFoAUcNIZNVXjm6zTjPhkM8kN358S\nx+YwDQYJKoZIhvcNAQELBQADggEBAIzVKYxUe/Mu1/ev5VxBVzrmIT/PXvJmGwqf\nFHGs9NBw4sKNwzMe04rDWgrXBxWNLnorpQQ53dPZg+sJJofnAJCh7VKhI0pcj/Il\nzx9F11cjjokgbdkCQEVjx81Cw5OKh+jm9/TTk4H5qDr9UM+/M67pM+S8hSIGuSL1\n1CfMUtN+90B0u3PPl6O9a9mRU+JRCiJ024X9oQmMfuqAlO9HCYcEEV+JN8wu16ys\n+XjWWFP5VkcxRuC30c59OiERNXuL6agQBVxi75yat6nC++LJOk8gE77PXwXEUHoi\nMM/6tsT0rSZp92reE/NauAASGzxOdKkrD2Vly8BvXhGVc6wjVM4=\n-----END CERTIFICATE-----\n", "cert": "/etc/pki/tls/certs/quadlet_demo.crt", "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQdM709gnTSpWKc9q2kR+byDANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjNzRj\nZWY0ZjYtMDlkMzRhOTUtOGE3M2RhYjYtOTExZjliYzcwHhcNMjUwNzE5MTY0MTA2\nWhcNMjYwNzE5MTY0MTA1WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZeTppg4oaai8M+g3Y9TNIfbeozWj68knx\n2+NSZ/fRFtLgcprsYUohnl18L6whseZHh1ddY68c3hCWbRb1C6R4bQkh9oSCCfr5\nvkFWup4Nj3W63OJqMvRm99pRLGuUf+AJdQtEQPS8nR4a1gwyjlFLehp/sttTBveb\nuQt6KT9bPbKUa1As+qsLSuGDywE+oPYMy5PLOuM6HIfrTfaekBUvpcKAJZ80U9vP\nTM8iEIl/UJcGsYwVdfkjnR9iFC8iq/otk3xzeQudT6xgpcMjbDb5L5ZmLfsNeIfm\nL4KxZQhjKiS/fUo8d8OhbSe0OVT9AzEOMOK/LM5o8XwDy3CgAUqVAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU8wj8\nfr1kQSrxiHl3mSFodk4uhGwwHwYDVR0jBBgwFoAUcNIZNVXjm6zTjPhkM8kN358S\nx+YwDQYJKoZIhvcNAQELBQADggEBAIzVKYxUe/Mu1/ev5VxBVzrmIT/PXvJmGwqf\nFHGs9NBw4sKNwzMe04rDWgrXBxWNLnorpQQ53dPZg+sJJofnAJCh7VKhI0pcj/Il\nzx9F11cjjokgbdkCQEVjx81Cw5OKh+jm9/TTk4H5qDr9UM+/M67pM+S8hSIGuSL1\n1CfMUtN+90B0u3PPl6O9a9mRU+JRCiJ024X9oQmMfuqAlO9HCYcEEV+JN8wu16ys\n+XjWWFP5VkcxRuC30c59OiERNXuL6agQBVxi75yat6nC++LJOk8gE77PXwXEUHoi\nMM/6tsT0rSZp92reE/NauAASGzxOdKkrD2Vly8BvXhGVc6wjVM4=\n-----END CERTIFICATE-----\n", "key": "/etc/pki/tls/private/quadlet_demo.key", "key_content": "-----BEGIN PRIVATE 
KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDZeTppg4oaai8M\n+g3Y9TNIfbeozWj68knx2+NSZ/fRFtLgcprsYUohnl18L6whseZHh1ddY68c3hCW\nbRb1C6R4bQkh9oSCCfr5vkFWup4Nj3W63OJqMvRm99pRLGuUf+AJdQtEQPS8nR4a\n1gwyjlFLehp/sttTBvebuQt6KT9bPbKUa1As+qsLSuGDywE+oPYMy5PLOuM6HIfr\nTfaekBUvpcKAJZ80U9vPTM8iEIl/UJcGsYwVdfkjnR9iFC8iq/otk3xzeQudT6xg\npcMjbDb5L5ZmLfsNeIfmL4KxZQhjKiS/fUo8d8OhbSe0OVT9AzEOMOK/LM5o8XwD\ny3CgAUqVAgMBAAECggEBAMGO7nhfBRoBjCMGpDCH/L72dfKd8GP6JepIBYmg06ab\no9D08AKXjP1jEF2pqJDM3+KmCRIr2P3ef/EC4z3ywM4Wbdx2pvV+oIyL02e37Viw\naN6KHlLbS0AiNYOt/4pKTkLck8yJ68m8JcFI5c5RcPW2lcM2k5LQTq2hAyAVejhf\ni1LK/n0jjksiaUWUl3aJTn84hExDurVaBv2MwoFRGkxLYEFmBIVHR6HqRK46ms7x\n5GYZy8Gt0lopfOW1qFt+XE//RBFTPXKxbOl4IoUSVwDbVcfdPt9U/EX2aIf/f6A8\neOyZjyMGRXkYAFLO67cWANuE0N5+BZ9PbpgzQ6+1WcECgYEA8uh32yoHbqwREv9F\nRe1hN3ceimxm3bYxlnISgswb2bVuK8+Atmf+XJZMyqLu6dPhMKXtXjq5x3E9G26Y\nVBNIgz/P9kN0j+dOLlb2kps7Oy+AD/30azv9JNX2XSCafrbjHEXMWS+Mqp/LFWUj\n++1npYes29voKGKveuHA61zFEskCgYEA5THThLSTCQvKjFxQBocvKvE/ptaC4rCM\ngMm7rQ+Ok2Ne0EJaFI7NtipcATGr+O53iifcKQduyLCciFtLaV30Pm8sp1sOrfIa\nPz0XJK1IKjvfXoHpml3R0DoN8yUf268EAr0lL2MA2LRCrbsqdLFl++zfjvh61RoU\n93zqqr9oc20CgYB3jA2g4VhzEV/914BCYAeegpOXUK53o3lYL6AIm2eBiBdGnIMe\nQqQe8SGuAO5QSiTkCmV+IKzaCVMnsxBpIGxpm/g/nQc6433ItKf5k0hdYo1GUZf/\nRrDZdiwfvTIL9vpjKb7ejdZPCwGp7/L7w+VkxBdAxUPGzHrMoVqO+0NKcQKBgDL1\nGA0aRNKvD3KTl6tL5R3ME5XZfbI/BI4hzBtXKSD15/+vsO1/LpeA5YHSdGTdSDCs\nWhO5NlVihvr+KU8c94/mEeyHn0TnXhSPVZTu/et98lTF15VdpKeCMNXNfD7Qs7iU\nKy9U+Lw1n9mWkoLtajpP2QilkFHPAbhgyp/5/JQlAoGAGKdisbssaD2jH/2epQmS\nbcSWagA12so6eTM+6QuKZvBS4nN3Fc0ZNPLpYCCD0I/W9qrudCoeUCQUgcnNoya5\n/0JoNoR9sK38Vjj87Fa/ZgKS9HjGSUI26+8LEUZer4EdTHk5tYxTHZFN33vp4BmF\nTI8S0bNNG7axPPgFTEAbx9k=\n-----END PRIVATE KEY-----\n" } } }, "ansible_loop_var": "cert_name", "cert_name": "quadlet_demo", "changed": false } TASK [fedora.linux_system_roles.certificate : Stop tracking certificates] ****** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:169 Saturday 19 July 2025 12:41:07 -0400 (0:00:00.055) 0:00:14.600 ********* ok: [managed-node2] => (item={'cert': '/etc/pki/tls/certs/quadlet_demo.crt', 'key': '/etc/pki/tls/private/quadlet_demo.key', 'ca': '/etc/pki/tls/certs/quadlet_demo.crt', 'cert_content': '-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQdM709gnTSpWKc9q2kR+byDANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjNzRj\nZWY0ZjYtMDlkMzRhOTUtOGE3M2RhYjYtOTExZjliYzcwHhcNMjUwNzE5MTY0MTA2\nWhcNMjYwNzE5MTY0MTA1WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZeTppg4oaai8M+g3Y9TNIfbeozWj68knx\n2+NSZ/fRFtLgcprsYUohnl18L6whseZHh1ddY68c3hCWbRb1C6R4bQkh9oSCCfr5\nvkFWup4Nj3W63OJqMvRm99pRLGuUf+AJdQtEQPS8nR4a1gwyjlFLehp/sttTBveb\nuQt6KT9bPbKUa1As+qsLSuGDywE+oPYMy5PLOuM6HIfrTfaekBUvpcKAJZ80U9vP\nTM8iEIl/UJcGsYwVdfkjnR9iFC8iq/otk3xzeQudT6xgpcMjbDb5L5ZmLfsNeIfm\nL4KxZQhjKiS/fUo8d8OhbSe0OVT9AzEOMOK/LM5o8XwDy3CgAUqVAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU8wj8\nfr1kQSrxiHl3mSFodk4uhGwwHwYDVR0jBBgwFoAUcNIZNVXjm6zTjPhkM8kN358S\nx+YwDQYJKoZIhvcNAQELBQADggEBAIzVKYxUe/Mu1/ev5VxBVzrmIT/PXvJmGwqf\nFHGs9NBw4sKNwzMe04rDWgrXBxWNLnorpQQ53dPZg+sJJofnAJCh7VKhI0pcj/Il\nzx9F11cjjokgbdkCQEVjx81Cw5OKh+jm9/TTk4H5qDr9UM+/M67pM+S8hSIGuSL1\n1CfMUtN+90B0u3PPl6O9a9mRU+JRCiJ024X9oQmMfuqAlO9HCYcEEV+JN8wu16ys\n+XjWWFP5VkcxRuC30c59OiERNXuL6agQBVxi75yat6nC++LJOk8gE77PXwXEUHoi\nMM/6tsT0rSZp92reE/NauAASGzxOdKkrD2Vly8BvXhGVc6wjVM4=\n-----END 
CERTIFICATE-----\n', 'key_content': '-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDZeTppg4oaai8M\n+g3Y9TNIfbeozWj68knx2+NSZ/fRFtLgcprsYUohnl18L6whseZHh1ddY68c3hCW\nbRb1C6R4bQkh9oSCCfr5vkFWup4Nj3W63OJqMvRm99pRLGuUf+AJdQtEQPS8nR4a\n1gwyjlFLehp/sttTBvebuQt6KT9bPbKUa1As+qsLSuGDywE+oPYMy5PLOuM6HIfr\nTfaekBUvpcKAJZ80U9vPTM8iEIl/UJcGsYwVdfkjnR9iFC8iq/otk3xzeQudT6xg\npcMjbDb5L5ZmLfsNeIfmL4KxZQhjKiS/fUo8d8OhbSe0OVT9AzEOMOK/LM5o8XwD\ny3CgAUqVAgMBAAECggEBAMGO7nhfBRoBjCMGpDCH/L72dfKd8GP6JepIBYmg06ab\no9D08AKXjP1jEF2pqJDM3+KmCRIr2P3ef/EC4z3ywM4Wbdx2pvV+oIyL02e37Viw\naN6KHlLbS0AiNYOt/4pKTkLck8yJ68m8JcFI5c5RcPW2lcM2k5LQTq2hAyAVejhf\ni1LK/n0jjksiaUWUl3aJTn84hExDurVaBv2MwoFRGkxLYEFmBIVHR6HqRK46ms7x\n5GYZy8Gt0lopfOW1qFt+XE//RBFTPXKxbOl4IoUSVwDbVcfdPt9U/EX2aIf/f6A8\neOyZjyMGRXkYAFLO67cWANuE0N5+BZ9PbpgzQ6+1WcECgYEA8uh32yoHbqwREv9F\nRe1hN3ceimxm3bYxlnISgswb2bVuK8+Atmf+XJZMyqLu6dPhMKXtXjq5x3E9G26Y\nVBNIgz/P9kN0j+dOLlb2kps7Oy+AD/30azv9JNX2XSCafrbjHEXMWS+Mqp/LFWUj\n++1npYes29voKGKveuHA61zFEskCgYEA5THThLSTCQvKjFxQBocvKvE/ptaC4rCM\ngMm7rQ+Ok2Ne0EJaFI7NtipcATGr+O53iifcKQduyLCciFtLaV30Pm8sp1sOrfIa\nPz0XJK1IKjvfXoHpml3R0DoN8yUf268EAr0lL2MA2LRCrbsqdLFl++zfjvh61RoU\n93zqqr9oc20CgYB3jA2g4VhzEV/914BCYAeegpOXUK53o3lYL6AIm2eBiBdGnIMe\nQqQe8SGuAO5QSiTkCmV+IKzaCVMnsxBpIGxpm/g/nQc6433ItKf5k0hdYo1GUZf/\nRrDZdiwfvTIL9vpjKb7ejdZPCwGp7/L7w+VkxBdAxUPGzHrMoVqO+0NKcQKBgDL1\nGA0aRNKvD3KTl6tL5R3ME5XZfbI/BI4hzBtXKSD15/+vsO1/LpeA5YHSdGTdSDCs\nWhO5NlVihvr+KU8c94/mEeyHn0TnXhSPVZTu/et98lTF15VdpKeCMNXNfD7Qs7iU\nKy9U+Lw1n9mWkoLtajpP2QilkFHPAbhgyp/5/JQlAoGAGKdisbssaD2jH/2epQmS\nbcSWagA12so6eTM+6QuKZvBS4nN3Fc0ZNPLpYCCD0I/W9qrudCoeUCQUgcnNoya5\n/0JoNoR9sK38Vjj87Fa/ZgKS9HjGSUI26+8LEUZer4EdTHk5tYxTHZFN33vp4BmF\nTI8S0bNNG7axPPgFTEAbx9k=\n-----END PRIVATE KEY-----\n', 'ca_content': '-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQdM709gnTSpWKc9q2kR+byDANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjNzRj\nZWY0ZjYtMDlkMzRhOTUtOGE3M2RhYjYtOTExZjliYzcwHhcNMjUwNzE5MTY0MTA2\nWhcNMjYwNzE5MTY0MTA1WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZeTppg4oaai8M+g3Y9TNIfbeozWj68knx\n2+NSZ/fRFtLgcprsYUohnl18L6whseZHh1ddY68c3hCWbRb1C6R4bQkh9oSCCfr5\nvkFWup4Nj3W63OJqMvRm99pRLGuUf+AJdQtEQPS8nR4a1gwyjlFLehp/sttTBveb\nuQt6KT9bPbKUa1As+qsLSuGDywE+oPYMy5PLOuM6HIfrTfaekBUvpcKAJZ80U9vP\nTM8iEIl/UJcGsYwVdfkjnR9iFC8iq/otk3xzeQudT6xgpcMjbDb5L5ZmLfsNeIfm\nL4KxZQhjKiS/fUo8d8OhbSe0OVT9AzEOMOK/LM5o8XwDy3CgAUqVAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU8wj8\nfr1kQSrxiHl3mSFodk4uhGwwHwYDVR0jBBgwFoAUcNIZNVXjm6zTjPhkM8kN358S\nx+YwDQYJKoZIhvcNAQELBQADggEBAIzVKYxUe/Mu1/ev5VxBVzrmIT/PXvJmGwqf\nFHGs9NBw4sKNwzMe04rDWgrXBxWNLnorpQQ53dPZg+sJJofnAJCh7VKhI0pcj/Il\nzx9F11cjjokgbdkCQEVjx81Cw5OKh+jm9/TTk4H5qDr9UM+/M67pM+S8hSIGuSL1\n1CfMUtN+90B0u3PPl6O9a9mRU+JRCiJ024X9oQmMfuqAlO9HCYcEEV+JN8wu16ys\n+XjWWFP5VkcxRuC30c59OiERNXuL6agQBVxi75yat6nC++LJOk8gE77PXwXEUHoi\nMM/6tsT0rSZp92reE/NauAASGzxOdKkrD2Vly8BvXhGVc6wjVM4=\n-----END CERTIFICATE-----\n'}) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "getcert", "stop-tracking", "-f", "/etc/pki/tls/certs/quadlet_demo.crt" ], "delta": "0:00:00.049546", "end": "2025-07-19 12:41:08.388518", "item": { "ca": "/etc/pki/tls/certs/quadlet_demo.crt", "ca_content": "-----BEGIN 
CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQdM709gnTSpWKc9q2kR+byDANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjNzRj\nZWY0ZjYtMDlkMzRhOTUtOGE3M2RhYjYtOTExZjliYzcwHhcNMjUwNzE5MTY0MTA2\nWhcNMjYwNzE5MTY0MTA1WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZeTppg4oaai8M+g3Y9TNIfbeozWj68knx\n2+NSZ/fRFtLgcprsYUohnl18L6whseZHh1ddY68c3hCWbRb1C6R4bQkh9oSCCfr5\nvkFWup4Nj3W63OJqMvRm99pRLGuUf+AJdQtEQPS8nR4a1gwyjlFLehp/sttTBveb\nuQt6KT9bPbKUa1As+qsLSuGDywE+oPYMy5PLOuM6HIfrTfaekBUvpcKAJZ80U9vP\nTM8iEIl/UJcGsYwVdfkjnR9iFC8iq/otk3xzeQudT6xgpcMjbDb5L5ZmLfsNeIfm\nL4KxZQhjKiS/fUo8d8OhbSe0OVT9AzEOMOK/LM5o8XwDy3CgAUqVAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU8wj8\nfr1kQSrxiHl3mSFodk4uhGwwHwYDVR0jBBgwFoAUcNIZNVXjm6zTjPhkM8kN358S\nx+YwDQYJKoZIhvcNAQELBQADggEBAIzVKYxUe/Mu1/ev5VxBVzrmIT/PXvJmGwqf\nFHGs9NBw4sKNwzMe04rDWgrXBxWNLnorpQQ53dPZg+sJJofnAJCh7VKhI0pcj/Il\nzx9F11cjjokgbdkCQEVjx81Cw5OKh+jm9/TTk4H5qDr9UM+/M67pM+S8hSIGuSL1\n1CfMUtN+90B0u3PPl6O9a9mRU+JRCiJ024X9oQmMfuqAlO9HCYcEEV+JN8wu16ys\n+XjWWFP5VkcxRuC30c59OiERNXuL6agQBVxi75yat6nC++LJOk8gE77PXwXEUHoi\nMM/6tsT0rSZp92reE/NauAASGzxOdKkrD2Vly8BvXhGVc6wjVM4=\n-----END CERTIFICATE-----\n", "cert": "/etc/pki/tls/certs/quadlet_demo.crt", "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQdM709gnTSpWKc9q2kR+byDANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjNzRj\nZWY0ZjYtMDlkMzRhOTUtOGE3M2RhYjYtOTExZjliYzcwHhcNMjUwNzE5MTY0MTA2\nWhcNMjYwNzE5MTY0MTA1WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZeTppg4oaai8M+g3Y9TNIfbeozWj68knx\n2+NSZ/fRFtLgcprsYUohnl18L6whseZHh1ddY68c3hCWbRb1C6R4bQkh9oSCCfr5\nvkFWup4Nj3W63OJqMvRm99pRLGuUf+AJdQtEQPS8nR4a1gwyjlFLehp/sttTBveb\nuQt6KT9bPbKUa1As+qsLSuGDywE+oPYMy5PLOuM6HIfrTfaekBUvpcKAJZ80U9vP\nTM8iEIl/UJcGsYwVdfkjnR9iFC8iq/otk3xzeQudT6xgpcMjbDb5L5ZmLfsNeIfm\nL4KxZQhjKiS/fUo8d8OhbSe0OVT9AzEOMOK/LM5o8XwDy3CgAUqVAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU8wj8\nfr1kQSrxiHl3mSFodk4uhGwwHwYDVR0jBBgwFoAUcNIZNVXjm6zTjPhkM8kN358S\nx+YwDQYJKoZIhvcNAQELBQADggEBAIzVKYxUe/Mu1/ev5VxBVzrmIT/PXvJmGwqf\nFHGs9NBw4sKNwzMe04rDWgrXBxWNLnorpQQ53dPZg+sJJofnAJCh7VKhI0pcj/Il\nzx9F11cjjokgbdkCQEVjx81Cw5OKh+jm9/TTk4H5qDr9UM+/M67pM+S8hSIGuSL1\n1CfMUtN+90B0u3PPl6O9a9mRU+JRCiJ024X9oQmMfuqAlO9HCYcEEV+JN8wu16ys\n+XjWWFP5VkcxRuC30c59OiERNXuL6agQBVxi75yat6nC++LJOk8gE77PXwXEUHoi\nMM/6tsT0rSZp92reE/NauAASGzxOdKkrD2Vly8BvXhGVc6wjVM4=\n-----END CERTIFICATE-----\n", "key": "/etc/pki/tls/private/quadlet_demo.key", "key_content": "-----BEGIN PRIVATE 
KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDZeTppg4oaai8M\n+g3Y9TNIfbeozWj68knx2+NSZ/fRFtLgcprsYUohnl18L6whseZHh1ddY68c3hCW\nbRb1C6R4bQkh9oSCCfr5vkFWup4Nj3W63OJqMvRm99pRLGuUf+AJdQtEQPS8nR4a\n1gwyjlFLehp/sttTBvebuQt6KT9bPbKUa1As+qsLSuGDywE+oPYMy5PLOuM6HIfr\nTfaekBUvpcKAJZ80U9vPTM8iEIl/UJcGsYwVdfkjnR9iFC8iq/otk3xzeQudT6xg\npcMjbDb5L5ZmLfsNeIfmL4KxZQhjKiS/fUo8d8OhbSe0OVT9AzEOMOK/LM5o8XwD\ny3CgAUqVAgMBAAECggEBAMGO7nhfBRoBjCMGpDCH/L72dfKd8GP6JepIBYmg06ab\no9D08AKXjP1jEF2pqJDM3+KmCRIr2P3ef/EC4z3ywM4Wbdx2pvV+oIyL02e37Viw\naN6KHlLbS0AiNYOt/4pKTkLck8yJ68m8JcFI5c5RcPW2lcM2k5LQTq2hAyAVejhf\ni1LK/n0jjksiaUWUl3aJTn84hExDurVaBv2MwoFRGkxLYEFmBIVHR6HqRK46ms7x\n5GYZy8Gt0lopfOW1qFt+XE//RBFTPXKxbOl4IoUSVwDbVcfdPt9U/EX2aIf/f6A8\neOyZjyMGRXkYAFLO67cWANuE0N5+BZ9PbpgzQ6+1WcECgYEA8uh32yoHbqwREv9F\nRe1hN3ceimxm3bYxlnISgswb2bVuK8+Atmf+XJZMyqLu6dPhMKXtXjq5x3E9G26Y\nVBNIgz/P9kN0j+dOLlb2kps7Oy+AD/30azv9JNX2XSCafrbjHEXMWS+Mqp/LFWUj\n++1npYes29voKGKveuHA61zFEskCgYEA5THThLSTCQvKjFxQBocvKvE/ptaC4rCM\ngMm7rQ+Ok2Ne0EJaFI7NtipcATGr+O53iifcKQduyLCciFtLaV30Pm8sp1sOrfIa\nPz0XJK1IKjvfXoHpml3R0DoN8yUf268EAr0lL2MA2LRCrbsqdLFl++zfjvh61RoU\n93zqqr9oc20CgYB3jA2g4VhzEV/914BCYAeegpOXUK53o3lYL6AIm2eBiBdGnIMe\nQqQe8SGuAO5QSiTkCmV+IKzaCVMnsxBpIGxpm/g/nQc6433ItKf5k0hdYo1GUZf/\nRrDZdiwfvTIL9vpjKb7ejdZPCwGp7/L7w+VkxBdAxUPGzHrMoVqO+0NKcQKBgDL1\nGA0aRNKvD3KTl6tL5R3ME5XZfbI/BI4hzBtXKSD15/+vsO1/LpeA5YHSdGTdSDCs\nWhO5NlVihvr+KU8c94/mEeyHn0TnXhSPVZTu/et98lTF15VdpKeCMNXNfD7Qs7iU\nKy9U+Lw1n9mWkoLtajpP2QilkFHPAbhgyp/5/JQlAoGAGKdisbssaD2jH/2epQmS\nbcSWagA12so6eTM+6QuKZvBS4nN3Fc0ZNPLpYCCD0I/W9qrudCoeUCQUgcnNoya5\n/0JoNoR9sK38Vjj87Fa/ZgKS9HjGSUI26+8LEUZer4EdTHk5tYxTHZFN33vp4BmF\nTI8S0bNNG7axPPgFTEAbx9k=\n-----END PRIVATE KEY-----\n" }, "rc": 0, "start": "2025-07-19 12:41:08.338972" } STDOUT: Request "20250719164106" removed. 
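For context, the certificate request logged above is driven by a single certificate_requests item ({'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}), and the cleanup that follows mirrors the logged command getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt. A minimal sketch of a playbook that would produce the same request through the certificate role looks like this (play name and comments are illustrative, not the test's actual source; the file paths are the ones slurped above):

    - name: Generate a throwaway self-signed certificate
      hosts: all
      vars:
        certificate_requests:
          - name: quadlet_demo   # cert/key land in the files slurped above:
            dns:                 #   /etc/pki/tls/certs/quadlet_demo.crt
              - localhost        #   /etc/pki/tls/private/quadlet_demo.key
            ca: self-sign        # certmonger's local self-signing CA
      roles:
        - fedora.linux_system_roles.certificate

Stopping tracking before removing the files matters because certmonger would otherwise keep monitoring, and eventually re-request, the certificate it was asked to maintain.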
TASK [fedora.linux_system_roles.certificate : Remove files] ******************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:174 Saturday 19 July 2025 12:41:08 -0400 (0:00:00.554) 0:00:15.155 ********* changed: [managed-node2] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => { "ansible_loop_var": "item", "changed": true, "item": "/etc/pki/tls/certs/quadlet_demo.crt", "path": "/etc/pki/tls/certs/quadlet_demo.crt", "state": "absent" } changed: [managed-node2] => (item=/etc/pki/tls/private/quadlet_demo.key) => { "ansible_loop_var": "item", "changed": true, "item": "/etc/pki/tls/private/quadlet_demo.key", "path": "/etc/pki/tls/private/quadlet_demo.key", "state": "absent" } ok: [managed-node2] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => { "ansible_loop_var": "item", "changed": false, "item": "/etc/pki/tls/certs/quadlet_demo.crt", "path": "/etc/pki/tls/certs/quadlet_demo.crt", "state": "absent" } TASK [Run the role] ************************************************************ task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:62 Saturday 19 July 2025 12:41:09 -0400 (0:00:01.470) 0:00:16.626 ********* TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 19 July 2025 12:41:10 -0400 (0:00:00.094) 0:00:16.721 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 19 July 2025 12:41:10 -0400 (0:00:00.024) 0:00:16.746 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 19 July 2025 12:41:10 -0400 (0:00:00.017) 0:00:16.763 ********* ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 19 July 2025 12:41:10 -0400 (0:00:00.367) 0:00:17.131 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 19 July 2025 12:41:10 -0400 (0:00:00.031) 0:00:17.163 ********* ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 19 July 2025 12:41:10 -0400 (0:00:00.373) 0:00:17.537 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: 
/tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 19 July 2025 12:41:10 -0400 (0:00:00.029) 0:00:17.566 ********* ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 19 July 2025 12:41:10 -0400 (0:00:00.069) 0:00:17.636 ********* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 19 July 2025 12:41:12 -0400 (0:00:01.826) 0:00:19.462 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 19 July 2025 12:41:12 -0400 (0:00:00.034) 0:00:19.496 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 19 July 2025 12:41:12 -0400 (0:00:00.045) 0:00:19.542 ********* skipping: [managed-node2] => {} TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 19 July 2025 12:41:12 -0400 (0:00:00.040) 0:00:19.582 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 19 July 2025 12:41:12 -0400 (0:00:00.033) 0:00:19.615 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" 
} TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 19 July 2025 12:41:12 -0400 (0:00:00.030) 0:00:19.646 ********* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.029519", "end": "2025-07-19 12:41:13.261741", "rc": 0, "start": "2025-07-19 12:41:13.232222" } STDOUT: podman version 4.9.4-dev TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 19 July 2025 12:41:13 -0400 (0:00:00.371) 0:00:20.018 ********* ok: [managed-node2] => { "ansible_facts": { "podman_version": "4.9.4-dev" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 19 July 2025 12:41:13 -0400 (0:00:00.033) 0:00:20.051 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 19 July 2025 12:41:13 -0400 (0:00:00.030) 0:00:20.082 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 19 July 2025 12:41:13 -0400 (0:00:00.131) 0:00:20.213 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 19 July 2025 12:41:13 -0400 (0:00:00.078) 0:00:20.291 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 19 July 2025 12:41:13 -0400 (0:00:00.060) 0:00:20.352 ********* ok: [managed-node2] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "root", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 19 July 2025 12:41:14 -0400 (0:00:00.538) 0:00:20.891 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: 
/tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 19 July 2025 12:41:14 -0400 (0:00:00.059) 0:00:20.950 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 19 July 2025 12:41:14 -0400 (0:00:00.065) 0:00:21.016 ********* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1752942923.4985383, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1752942894.3494363, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "3031672287", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 19 July 2025 12:41:14 -0400 (0:00:00.410) 0:00:21.427 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 19 July 2025 12:41:14 -0400 (0:00:00.055) 0:00:21.483 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 19 July 2025 12:41:14 -0400 (0:00:00.054) 0:00:21.537 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 19 July 2025 12:41:14 -0400 (0:00:00.054) 0:00:21.592 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 19 July 2025 12:41:14 -0400 (0:00:00.053) 0:00:21.646 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 19 July 2025 12:41:15 -0400 (0:00:00.056) 
0:00:21.702 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 19 July 2025 12:41:15 -0400 (0:00:00.055) 0:00:21.757 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 19 July 2025 12:41:15 -0400 (0:00:00.052) 0:00:21.810 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 19 July 2025 12:41:15 -0400 (0:00:00.054) 0:00:21.864 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Saturday 19 July 2025 12:41:15 -0400 (0:00:00.127) 0:00:21.991 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 19 July 2025 12:41:15 -0400 (0:00:00.100) 0:00:22.092 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 19 July 2025 12:41:15 -0400 (0:00:00.053) 0:00:22.145 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Saturday 19 July 2025 12:41:15 -0400 (0:00:00.089) 0:00:22.235 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 19 July 2025 12:41:15 -0400 (0:00:00.107) 0:00:22.342 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 19 July 2025 12:41:15 -0400 (0:00:00.047) 0:00:22.390 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Saturday 19 July 2025 12:41:15 -0400 (0:00:00.035) 0:00:22.425 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Saturday 19 July 2025 12:41:15 -0400 (0:00:00.083) 0:00:22.508 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Saturday 19 July 2025 12:41:15 -0400 (0:00:00.037) 0:00:22.546 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Saturday 19 July 2025 12:41:15 -0400 (0:00:00.033) 0:00:22.580 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Saturday 19 July 2025 12:41:15 -0400 (0:00:00.064) 0:00:22.644 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Saturday 19 July 2025 12:41:15 -0400 (0:00:00.032) 0:00:22.676 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Saturday 19 July 2025 12:41:16 -0400 (0:00:00.031) 0:00:22.708 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Saturday 19 July 2025 12:41:16 -0400 (0:00:00.031) 0:00:22.739 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: 
/tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Saturday 19 July 2025 12:41:16 -0400 (0:00:00.031) 0:00:22.771 ********* TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 19 July 2025 12:41:16 -0400 (0:00:00.101) 0:00:22.873 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Saturday 19 July 2025 12:41:16 -0400 (0:00:00.101) 0:00:22.974 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Saturday 19 July 2025 12:41:16 -0400 (0:00:00.039) 0:00:23.014 ********* ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Saturday 19 July 2025 12:41:16 -0400 (0:00:00.353) 0:00:23.368 ********* ok: [managed-node2] => { "ansible_facts": { "__firewall_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 19 July 2025 12:41:16 -0400 (0:00:00.035) 0:00:23.404 ********* ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Saturday 19 July 2025 12:41:17 -0400 (0:00:00.368) 0:00:23.772 ********* ok: [managed-node2] => { "ansible_facts": { "__firewall_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Run systemctl] ********************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:34 Saturday 19 July 2025 12:41:17 -0400 (0:00:00.055) 0:00:23.827 ********* ok: [managed-node2] => { "changed": false, "cmd": [ "systemctl", "is-system-running" ], "delta": "0:00:00.007461", "end": "2025-07-19 12:41:17.466907", "failed_when_result": false, "rc": 0, "start": "2025-07-19 12:41:17.459446" } STDOUT: running TASK [fedora.linux_system_roles.firewall : Require installed systemd] ********** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:41 Saturday 19 July 2025 12:41:17 -0400 (0:00:00.401) 0:00:24.229 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:46 Saturday 19 July 2025 12:41:17 -0400 (0:00:00.034) 
0:00:24.263 ********* ok: [managed-node2] => { "ansible_facts": { "__firewall_is_booted": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:51 Saturday 19 July 2025 12:41:17 -0400 (0:00:00.036) 0:00:24.299 ********* ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:63 Saturday 19 July 2025 12:41:20 -0400 (0:00:02.921) 0:00:27.221 ********* skipping: [managed-node2] => {} TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:68 Saturday 19 July 2025 12:41:20 -0400 (0:00:00.051) 0:00:27.273 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:73 Saturday 19 July 2025 12:41:20 -0400 (0:00:00.050) 0:00:27.323 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check which conflicting services are enabled] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 19 July 2025 12:41:20 -0400 (0:00:00.049) 0:00:27.373 ********* skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "item": "ufw", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:14 Saturday 19 July 2025 12:41:20 -0400 (0:00:00.064) 0:00:27.437 ********* skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'item': 'nftables', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "item": { "ansible_loop_var": "item", "changed": false, "item": "nftables", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'item': 'iptables', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "item": { "ansible_loop_var": "item", "changed": false, "item": "iptables", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'item': 'ufw', 
'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "item": { "ansible_loop_var": "item", "changed": false, "item": "ufw", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:24 Saturday 19 July 2025 12:41:20 -0400 (0:00:00.070) 0:00:27.508 ********* ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "ActiveEnterTimestamp": "Sat 2025-07-19 12:35:31 EDT", "ActiveEnterTimestampMonotonic": "319052571", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "polkit.service dbus.service system.slice basic.target dbus.socket sysinit.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-07-19 12:35:30 EDT", "AssertTimestampMonotonic": "318194415", "Before": "network-pre.target multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-07-19 12:35:30 EDT", "ConditionTimestampMonotonic": "318194414", "ConfigurationDirectoryMode": "0755", "Conflicts": "iptables.service ipset.service ebtables.service ip6tables.service nftables.service shutdown.target", "ControlGroup": "/system.slice/firewalld.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "12929", "ExecMainStartTimestamp": "Sat 2025-07-19 12:35:30 EDT", "ExecMainStartTimestampMonotonic": "318201406", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 
}", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-07-19 12:35:30 EDT", "InactiveExitTimestampMonotonic": "318201438", "InvocationID": "1a69340d6e3c45249ec2c2f742d27736", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "12929", "MemoryAccounting": "yes", "MemoryCurrent": "42983424", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": 
"[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sat 2025-07-19 12:35:31 EDT", "StateChangeTimestampMonotonic": "319052571", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogTimestamp": "Sat 2025-07-19 12:35:31 EDT", "WatchdogTimestampMonotonic": "319052568", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:30 Saturday 19 July 2025 12:41:21 -0400 (0:00:00.544) 0:00:28.053 ********* ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "ActiveEnterTimestamp": "Sat 2025-07-19 12:35:31 EDT", "ActiveEnterTimestampMonotonic": "319052571", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "polkit.service dbus.service system.slice basic.target dbus.socket sysinit.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-07-19 12:35:30 EDT", "AssertTimestampMonotonic": "318194415", "Before": "network-pre.target multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-07-19 12:35:30 EDT", "ConditionTimestampMonotonic": "318194414", "ConfigurationDirectoryMode": "0755", "Conflicts": "iptables.service ipset.service ebtables.service ip6tables.service nftables.service shutdown.target", "ControlGroup": "/system.slice/firewalld.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", 
"DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "12929", "ExecMainStartTimestamp": "Sat 2025-07-19 12:35:30 EDT", "ExecMainStartTimestampMonotonic": "318201406", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-07-19 12:35:30 EDT", "InactiveExitTimestampMonotonic": "318201438", "InvocationID": "1a69340d6e3c45249ec2c2f742d27736", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "12929", "MemoryAccounting": "yes", "MemoryCurrent": "42983424", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", 
"RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sat 2025-07-19 12:35:31 EDT", "StateChangeTimestampMonotonic": "319052571", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogTimestamp": "Sat 2025-07-19 12:35:31 EDT", "WatchdogTimestampMonotonic": "319052568", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:36 Saturday 19 July 2025 12:41:21 -0400 (0:00:00.510) 0:00:28.564 ********* ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/libexec/platform-python", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:45 Saturday 19 July 2025 12:41:21 -0400 (0:00:00.066) 0:00:28.630 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:58 Saturday 19 July 2025 12:41:21 -0400 (0:00:00.033) 0:00:28.664 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:74 Saturday 19 July 2025 12:41:22 -0400 (0:00:00.038) 0:00:28.702 ********* changed: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "port": "8000/tcp", "state": "enabled" } } changed: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "__firewall_changed": 
true, "ansible_loop_var": "item", "changed": true, "item": { "port": "9000/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:126 Saturday 19 July 2025 12:41:23 -0400 (0:00:01.264) 0:00:29.967 ********* skipping: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "item": { "port": "8000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "item": { "port": "9000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:137 Saturday 19 July 2025 12:41:23 -0400 (0:00:00.057) 0:00:30.025 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:146 Saturday 19 July 2025 12:41:23 -0400 (0:00:00.069) 0:00:30.094 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:152 Saturday 19 July 2025 12:41:23 -0400 (0:00:00.041) 0:00:30.135 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:161 Saturday 19 July 2025 12:41:23 -0400 (0:00:00.040) 0:00:30.176 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:172 Saturday 19 July 2025 12:41:23 -0400 (0:00:00.040) 0:00:30.217 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:178 Saturday 19 July 2025 12:41:23 -0400 (0:00:00.037) 0:00:30.255 ********* skipping: [managed-node2] => {} TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Saturday 19 July 2025 12:41:23 -0400 (0:00:00.030) 0:00:30.285 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Saturday 19 July 2025 12:41:23 -0400 
(0:00:00.031) 0:00:30.317 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Saturday 19 July 2025 12:41:23 -0400 (0:00:00.028) 0:00:30.346 ********* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Saturday 19 July 2025 12:41:23 -0400 (0:00:00.028) 0:00:30.375 ********* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Saturday 19 July 2025 12:41:23 -0400 (0:00:00.060) 0:00:30.436 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 19 July 2025 12:41:23 -0400 (0:00:00.125) 0:00:30.562 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 19 July 2025 12:41:23 -0400 (0:00:00.034) 0:00:30.596 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 19 July 2025 12:41:23 -0400 (0:00:00.061) 0:00:30.658 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 19 July 2025 12:41:24 -0400 (0:00:00.057) 0:00:30.715 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 19 July 2025 12:41:24 -0400 (0:00:00.055) 0:00:30.771 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman 
: See if getsubids exists] ************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 19 July 2025 12:41:24 -0400 (0:00:00.048) 0:00:30.820 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 19 July 2025 12:41:24 -0400 (0:00:00.036) 0:00:30.857 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 19 July 2025 12:41:24 -0400 (0:00:00.038) 0:00:30.895 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 19 July 2025 12:41:24 -0400 (0:00:00.035) 0:00:30.930 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 19 July 2025 12:41:24 -0400 (0:00:00.030) 0:00:30.961 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 19 July 2025 12:41:24 -0400 (0:00:00.031) 0:00:30.993 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 19 July 2025 12:41:24 -0400 (0:00:00.029) 0:00:31.022 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 19 July 2025 12:41:24 -0400 (0:00:00.031) 0:00:31.054 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 19 July 2025 12:41:24 -0400 (0:00:00.031) 0:00:31.085 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 19 July 2025 12:41:24 
-0400 (0:00:00.043) 0:00:31.129 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 19 July 2025 12:41:24 -0400 (0:00:00.099) 0:00:31.228 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 19 July 2025 12:41:24 -0400 (0:00:00.097) 0:00:31.325 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 19 July 2025 12:41:24 -0400 (0:00:00.053) 0:00:31.379 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 19 July 2025 12:41:24 -0400 (0:00:00.055) 0:00:31.434 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 19 July 2025 12:41:24 -0400 (0:00:00.052) 0:00:31.487 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 19 July 2025 12:41:24 -0400 (0:00:00.051) 0:00:31.538 ********* fatal: [managed-node2]: FAILED! => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result" } TASK [Dump journal] ************************************************************ task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:142 Saturday 19 July 2025 12:41:24 -0400 (0:00:00.058) 0:00:31.597 ********* fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.027461", "end": "2025-07-19 12:41:25.259357", "failed_when_result": true, "rc": 0, "start": "2025-07-19 12:41:25.231896" } STDOUT: -- Logs begin at Sat 2025-07-19 12:30:11 EDT, end at Sat 2025-07-19 12:41:25 EDT. 
-- Jul 19 12:35:22 managed-node2 platform-python[11895]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:35:23 managed-node2 platform-python[12024]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 19 12:35:23 managed-node2 platform-python[12148]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:35:24 managed-node2 platform-python[12273]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:35:25 managed-node2 platform-python[12396]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:35:25 managed-node2 platform-python[12519]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:35:26 managed-node2 platform-python[12643]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:35:29 managed-node2 platform-python[12766]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 19 12:35:30 managed-node2 platform-python[12893]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 19 12:35:30 managed-node2 systemd[1]: Reloading. Jul 19 12:35:30 managed-node2 systemd[1]: Starting firewalld - dynamic firewall daemon... -- Subject: Unit firewalld.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit firewalld.service has begun starting up. Jul 19 12:35:31 managed-node2 systemd[1]: Started firewalld - dynamic firewall daemon. -- Subject: Unit firewalld.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit firewalld.service has finished starting up. -- -- The start-up result is done. Jul 19 12:35:31 managed-node2 firewalld[12929]: WARNING: AllowZoneDrifting is enabled. This is considered an insecure configuration option. It will be removed in a future release. Please consider disabling it now. 
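The journal here replays the firewall setup seen earlier in the play: firewalld is installed and started (12:35:26 through 12:35:31), and immediately below, fedora.linux_system_roles.firewall_lib enables 15001-15003/tcp for a previous test in the suite, while the quadlet demo play's own "Configure firewall" task enabled 8000/tcp and 9000/tcp. The AllowZoneDrifting message is a deprecation warning from firewalld itself and is harmless to this run. To reproduce the port-opening step outside the test harness, the firewall role can be driven directly; the following is a minimal sketch, not the test's actual task file. The play name and host pattern are illustrative; the firewall variable's list of port/state entries matches the items shown in the "Configure firewall" task output above.

    # Illustrative sketch, assuming the fedora.linux_system_roles
    # collection is installed on the control node.
    - name: Open the quadlet demo ports
      hosts: all
      vars:
        firewall:
          - port: 8000/tcp   # first item enabled by "Configure firewall"
            state: enabled
          - port: 9000/tcp   # second item
            state: enabled
      roles:
        - fedora.linux_system_roles.firewall

In the log itself this step is reached indirectly: the podman role's "Manage firewall for specified ports" task appears to forward the caller's port list (the podman_firewall variable) to the firewall role.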
Jul 19 12:35:32 managed-node2 platform-python[13115]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 19 12:35:33 managed-node2 platform-python[13238]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:35:33 managed-node2 platform-python[13361]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:35:34 managed-node2 platform-python[13484]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:35:37 managed-node2 platform-python[13607]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:35:39 managed-node2 platform-python[13730]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:35:42 managed-node2 dbus-daemon[591]: [system] Reloaded configuration Jul 19 12:35:42 managed-node2 dbus-daemon[591]: [system] Reloaded configuration Jul 19 12:35:42 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. -- Subject: Unit run-r58006eb3d48a46a9a552c0899f8af7ac.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit run-r58006eb3d48a46a9a552c0899f8af7ac.service has finished starting up. -- -- The start-up result is done. Jul 19 12:35:42 managed-node2 systemd[1]: Starting man-db-cache-update.service... -- Subject: Unit man-db-cache-update.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has begun starting up. Jul 19 12:35:42 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit man-db-cache-update.service has successfully entered the 'dead' state. Jul 19 12:35:42 managed-node2 systemd[1]: Started man-db-cache-update.service. -- Subject: Unit man-db-cache-update.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has finished starting up. -- -- The start-up result is done. Jul 19 12:35:42 managed-node2 systemd[1]: run-r58006eb3d48a46a9a552c0899f8af7ac.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-r58006eb3d48a46a9a552c0899f8af7ac.service has successfully entered the 'dead' state. Jul 19 12:35:43 managed-node2 platform-python[14335]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 19 12:35:43 managed-node2 platform-python[14483]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:35:45 managed-node2 platform-python[14607]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 19 12:35:46 managed-node2 kernel: SELinux: Converting 460 SID table entries... Jul 19 12:35:46 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jul 19 12:35:46 managed-node2 kernel: SELinux: policy capability open_perms=1 Jul 19 12:35:46 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jul 19 12:35:46 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jul 19 12:35:46 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jul 19 12:35:46 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jul 19 12:35:46 managed-node2 dbus-daemon[591]: [system] Reloaded configuration Jul 19 12:35:47 managed-node2 platform-python[14734]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 19 12:35:51 managed-node2 platform-python[14857]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:35:53 managed-node2 platform-python[14982]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:35:54 managed-node2 platform-python[15105]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:35:54 managed-node2 platform-python[15228]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 19 12:35:54 managed-node2 
platform-python[15327]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752942954.3752043-9946-72044176595742/source _original_basename=tmpi7ylefvg follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Jul 19 12:35:55 managed-node2 platform-python[15452]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:35:55 managed-node2 kernel: evm: overlay not supported Jul 19 12:35:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck103626253-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-metacopy\x2dcheck103626253-merged.mount has successfully entered the 'dead' state. Jul 19 12:35:55 managed-node2 systemd[1]: Created slice machine.slice. -- Subject: Unit machine.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine.slice has finished starting up. -- -- The start-up result is done. Jul 19 12:35:55 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice. -- Subject: Unit machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice has finished starting up. -- -- The start-up result is done. Jul 19 12:35:56 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
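The containers.podman.podman_play invocation recorded above (nopull.yml with state=created) is what created the libpod pod slice in the surrounding journal entries. The same deployment can be written as a single task against the containers.podman.podman_play module; the following is a minimal sketch, assuming podman and the containers.podman collection are installed and the kube file already exists on the host at the path shown in the log.

    # Sketch of the logged deployment step, not the test's task file.
    - name: Recreate the nopull deployment step
      hosts: all
      tasks:
        - name: Create (without starting) the pod described by nopull.yml
          containers.podman.podman_play:
            kube_file: /etc/containers/ansible-kubernetes.d/nopull.yml
            state: created

state: created mirrors the logged invocation; state: started would also run the pod, and state: absent tears it down.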
Jul 19 12:36:00 managed-node2 platform-python[15778]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:36:01 managed-node2 platform-python[15907]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:36:04 managed-node2 platform-python[16032]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:36:07 managed-node2 platform-python[16155]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 19 12:36:08 managed-node2 platform-python[16282]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 19 12:36:09 managed-node2 platform-python[16409]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 19 12:36:10 managed-node2 platform-python[16532]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:36:13 managed-node2 platform-python[16655]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:36:16 managed-node2 platform-python[16778]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False 
validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:36:19 managed-node2 platform-python[16901]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 19 12:36:21 managed-node2 platform-python[17049]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 19 12:36:22 managed-node2 platform-python[17172]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 19 12:36:26 managed-node2 platform-python[17295]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:36:28 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 19 12:36:28 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 19 12:36:29 managed-node2 platform-python[17558]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:36:29 managed-node2 platform-python[17681]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:36:29 managed-node2 platform-python[17804]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 19 12:36:30 managed-node2 platform-python[17903]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752942989.6065838-11409-115866441513393/source _original_basename=tmpinaqg9cl follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Jul 19 12:36:30 managed-node2 platform-python[18028]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None 
quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:36:30 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice. -- Subject: Unit machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice has finished starting up. -- -- The start-up result is done. Jul 19 12:36:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 19 12:36:34 managed-node2 platform-python[18315]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:36:35 managed-node2 platform-python[18444]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:36:37 managed-node2 platform-python[18569]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:36:40 managed-node2 platform-python[18692]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 19 12:36:41 managed-node2 platform-python[18819]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 19 12:36:41 managed-node2 platform-python[18946]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 19 12:36:43 managed-node2 platform-python[19069]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:36:46 managed-node2 platform-python[19192]: ansible-dnf Invoked with name=['grubby'] 
state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:36:49 managed-node2 platform-python[19315]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:36:52 managed-node2 platform-python[19438]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 19 12:36:54 managed-node2 platform-python[19586]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 19 12:36:54 managed-node2 platform-python[19709]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 19 12:36:59 managed-node2 platform-python[19832]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:37:00 managed-node2 platform-python[19957]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:37:01 managed-node2 platform-python[20081]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 19 12:37:01 managed-node2 platform-python[20208]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:37:02 managed-node2 platform-python[20333]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:37:02 managed-node2 platform-python[20333]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/nopull.yml Jul 19 12:37:02 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice. 
-- Subject: Unit machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice has finished shutting down. Jul 19 12:37:02 managed-node2 systemd[1]: machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice: Consumed 0 CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice completed and consumed the indicated resources. Jul 19 12:37:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 19 12:37:02 managed-node2 platform-python[20471]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:37:03 managed-node2 platform-python[20594]: ansible-command Invoked with _raw_params=podman image prune -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:37:06 managed-node2 platform-python[20849]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:37:07 managed-node2 platform-python[20978]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:37:10 managed-node2 platform-python[21103]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:37:13 managed-node2 platform-python[21226]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 19 12:37:14 managed-node2 platform-python[21353]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 19 12:37:15 managed-node2 platform-python[21480]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] 
source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 19 12:37:16 managed-node2 platform-python[21603]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:37:19 managed-node2 platform-python[21726]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:37:22 managed-node2 platform-python[21849]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:37:25 managed-node2 platform-python[21972]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 19 12:37:27 managed-node2 platform-python[22120]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 19 12:37:28 managed-node2 platform-python[22243]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 19 12:37:32 managed-node2 platform-python[22366]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:37:34 managed-node2 platform-python[22491]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:37:34 managed-node2 platform-python[22615]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 19 12:37:35 managed-node2 platform-python[22742]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:37:35 managed-node2 platform-python[22867]: ansible-containers.podman.podman_play Invoked with state=absent 
kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:37:35 managed-node2 platform-python[22867]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/bogus.yml Jul 19 12:37:35 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice. -- Subject: Unit machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice has finished shutting down. Jul 19 12:37:35 managed-node2 systemd[1]: machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice: Consumed 0 CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice completed and consumed the indicated resources. Jul 19 12:37:35 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
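
This is the teardown half of the same pattern: podman_play with state=absent removes the pod that was created from bogus.yml, systemd drops the corresponding machine-libpod_pod_*.slice, and the entries that follow remove the kube file itself and prune the now-unreferenced images. As playbook tasks, again using only what the invocations just above and below show (task names are illustrative):

    - name: Remove the pod defined by the kube spec
      containers.podman.podman_play:
        state: absent
        kube_file: /etc/containers/ansible-kubernetes.d/bogus.yml
        executable: podman

    - name: Remove the kube spec file
      file:
        path: /etc/containers/ansible-kubernetes.d/bogus.yml
        state: absent

    - name: Prune unused images
      command: podman image prune -f
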
Jul 19 12:37:36 managed-node2 platform-python[23006]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:37:36 managed-node2 platform-python[23129]: ansible-command Invoked with _raw_params=podman image prune -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:37:40 managed-node2 platform-python[23384]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:37:41 managed-node2 platform-python[23513]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:37:44 managed-node2 platform-python[23638]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:37:47 managed-node2 platform-python[23761]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 19 12:37:47 managed-node2 platform-python[23888]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 19 12:37:48 managed-node2 platform-python[24015]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 19 12:37:50 managed-node2 platform-python[24138]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:37:52 managed-node2 platform-python[24261]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False 
autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:37:55 managed-node2 platform-python[24384]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:37:58 managed-node2 platform-python[24507]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 19 12:38:00 managed-node2 platform-python[24655]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 19 12:38:01 managed-node2 platform-python[24778]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 19 12:38:05 managed-node2 platform-python[24901]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jul 19 12:38:06 managed-node2 platform-python[25025]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:38:06 managed-node2 platform-python[25150]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:06 managed-node2 platform-python[25274]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:08 managed-node2 platform-python[25398]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:08 managed-node2 platform-python[25522]: ansible-command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jul 19 12:38:08 managed-node2 systemd[1]: Created slice User Slice of UID 3001. -- Subject: Unit user-3001.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-3001.slice has finished starting up. -- -- The start-up result is done. Jul 19 12:38:08 managed-node2 systemd[1]: Starting User runtime directory /run/user/3001... -- Subject: Unit user-runtime-dir@3001.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@3001.service has begun starting up. 
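
The loginctl enable-linger invocation just above is what makes rootless podman viable for podman_basic_user: with lingering enabled, the per-user service manager (user@3001.service, starting up in the surrounding entries) persists without an open login session, so user-scope container units survive playbook disconnects. The creates= argument makes the task idempotent, since systemd records lingering as a flag file under /var/lib/systemd/linger/. A sketch of the task exactly as the invocation shows it:

    - name: Enable lingering for the rootless user
      command: loginctl enable-linger podman_basic_user
      args:
        creates: /var/lib/systemd/linger/podman_basic_user
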
Jul 19 12:38:08 managed-node2 systemd[1]: Started User runtime directory /run/user/3001. -- Subject: Unit user-runtime-dir@3001.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@3001.service has finished starting up. -- -- The start-up result is done. Jul 19 12:38:08 managed-node2 systemd[1]: Starting User Manager for UID 3001... -- Subject: Unit user@3001.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@3001.service has begun starting up. Jul 19 12:38:08 managed-node2 systemd[25528]: pam_unix(systemd-user:session): session opened for user podman_basic_user by (uid=0) Jul 19 12:38:08 managed-node2 systemd[25528]: Reached target Paths. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:38:08 managed-node2 systemd[25528]: Starting D-Bus User Message Bus Socket. -- Subject: Unit UNIT has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun starting up. Jul 19 12:38:08 managed-node2 systemd[25528]: Started Mark boot as successful after the user session has run 2 minutes. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:38:08 managed-node2 systemd[25528]: Reached target Timers. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:38:08 managed-node2 systemd[25528]: Listening on D-Bus User Message Bus Socket. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:38:08 managed-node2 systemd[25528]: Reached target Sockets. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:38:08 managed-node2 systemd[25528]: Reached target Basic System. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:38:08 managed-node2 systemd[25528]: Reached target Default. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:38:08 managed-node2 systemd[25528]: Startup finished in 28ms. -- Subject: User manager start-up is now complete -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The user manager instance for user 3001 has been started. All services queued -- for starting have been started. Note that other services might still be starting -- up or be started at any later time. -- -- Startup of the manager took 28712 microseconds. Jul 19 12:38:08 managed-node2 systemd[1]: Started User Manager for UID 3001. -- Subject: Unit user@3001.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@3001.service has finished starting up. 
-- -- The start-up result is done. Jul 19 12:38:09 managed-node2 platform-python[25663]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:38:10 managed-node2 platform-python[25786]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:38:10 managed-node2 sudo[25909]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ziadsxoqzpztrsztgsdmjtfdqrqqbghq ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943090.202407-15734-110385486361950/AnsiballZ_podman_image.py' Jul 19 12:38:10 managed-node2 sudo[25909]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:38:10 managed-node2 systemd[25528]: Started D-Bus User Message Bus. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:38:10 managed-node2 systemd[25528]: Created slice user.slice. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:38:10 managed-node2 systemd[25528]: Started podman-25921.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:38:10 managed-node2 systemd[25528]: Started podman-pause-1458d7a0.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:38:10 managed-node2 systemd[25528]: Started podman-25939.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:38:11 managed-node2 systemd[25528]: Started podman-25955.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
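
The sudo entry above (BECOME-SUCCESS, then AnsiballZ_podman_image.py run with XDG_RUNTIME_DIR=/run/user/3001) is how the role executes podman modules as the rootless user: become to podman_basic_user and point XDG_RUNTIME_DIR at the user's runtime directory, so podman uses the per-user storage and the user's systemd instance (the podman-*.scope units started by systemd[25528] above). A sketch of the pattern; the module parameters are an assumption, since the log records the module run but not its arguments, and the image name is taken from the play-kube debug output further below:

    - name: Pull the test image as the rootless user   # parameters assumed, not logged
      containers.podman.podman_image:
        name: quay.io/libpod/testimage:20210610
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001
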
Jul 19 12:38:11 managed-node2 sudo[25909]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:38:11 managed-node2 platform-python[26084]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:38:12 managed-node2 platform-python[26207]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:38:12 managed-node2 platform-python[26330]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 19 12:38:13 managed-node2 platform-python[26429]: ansible-copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752943092.4331284-15846-26644570363473/source _original_basename=tmp0dg28w0o follow=False checksum=fe0b16bd085957dfbf8e2496934305469d165478 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Jul 19 12:38:13 managed-node2 sudo[26554]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-louqgipewhnyaovmbewiqaddtljctvmr ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943093.148976-15887-61767546226259/AnsiballZ_podman_play.py' Jul 19 12:38:13 managed-node2 sudo[26554]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:38:13 managed-node2 platform-python[26557]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:38:13 managed-node2 systemd[25528]: Started podman-26565.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:38:13 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6 Jul 19 12:38:13 managed-node2 systemd[25528]: Started rootless-netns-6ed4b4b3.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
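
Here podman_play runs with state=started plus debug=True and log_level=debug, which is why the module captures and logs the full PODMAN-PLAY-KUBE stdout/stderr that follows; the command it shells out to is echoed verbatim a few entries down (/bin/podman play kube --start=true --log-level=debug .../httpd1.yml). The rootless-netns scope just above and the kernel tun/bridge/veth messages that follow are the rootless network namespace and CNI bridge (cni-podman1) being set up for the pod. As a task, using only parameters present in the invocation above; in the role this runs under the same become/XDG_RUNTIME_DIR pattern sketched earlier:

    - name: Start the httpd1 pod with verbose module logging
      containers.podman.podman_play:
        state: started
        debug: true
        log_level: debug
        kube_file: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
        executable: podman
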
Jul 19 12:38:13 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Jul 19 12:38:13 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vetha780888b: link is not ready Jul 19 12:38:13 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered blocking state Jul 19 12:38:13 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered disabled state Jul 19 12:38:13 managed-node2 kernel: device vetha780888b entered promiscuous mode Jul 19 12:38:13 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready Jul 19 12:38:13 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready Jul 19 12:38:13 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vetha780888b: link becomes ready Jul 19 12:38:13 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered blocking state Jul 19 12:38:13 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered forwarding state Jul 19 12:38:14 managed-node2 dnsmasq[26752]: listening on cni-podman1(#3): 10.89.0.1 Jul 19 12:38:14 managed-node2 dnsmasq[26754]: started, version 2.79 cachesize 150 Jul 19 12:38:14 managed-node2 dnsmasq[26754]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify Jul 19 12:38:14 managed-node2 dnsmasq[26754]: using local addresses only for domain dns.podman Jul 19 12:38:14 managed-node2 dnsmasq[26754]: reading /etc/resolv.conf Jul 19 12:38:14 managed-node2 dnsmasq[26754]: using local addresses only for domain dns.podman Jul 19 12:38:14 managed-node2 dnsmasq[26754]: using nameserver 10.0.2.3#53 Jul 19 12:38:14 managed-node2 dnsmasq[26754]: using nameserver 10.29.169.13#53 Jul 19 12:38:14 managed-node2 dnsmasq[26754]: using nameserver 10.29.170.12#53 Jul 19 12:38:14 managed-node2 dnsmasq[26754]: using nameserver 10.2.32.1#53 Jul 19 12:38:14 managed-node2 dnsmasq[26754]: read /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Jul 19 12:38:14 managed-node2 conmon[26767]: conmon f153d4517c8778d9470c : failed to write to /proc/self/oom_score_adj: Permission denied Jul 19 12:38:14 managed-node2 conmon[26768]: conmon f153d4517c8778d9470c : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach} Jul 19 12:38:14 managed-node2 conmon[26768]: conmon f153d4517c8778d9470c : terminal_ctrl_fd: 14 Jul 19 12:38:14 managed-node2 conmon[26768]: conmon f153d4517c8778d9470c : winsz read side: 17, winsz write side: 18 Jul 19 12:38:14 managed-node2 conmon[26768]: conmon f153d4517c8778d9470c : container PID: 26778 Jul 19 12:38:14 managed-node2 conmon[26788]: conmon a8773b3857e3e0dd4e13 : failed to write to /proc/self/oom_score_adj: Permission denied Jul 19 12:38:14 managed-node2 conmon[26789]: conmon a8773b3857e3e0dd4e13 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Jul 19 12:38:14 managed-node2 conmon[26789]: conmon a8773b3857e3e0dd4e13 : terminal_ctrl_fd: 13 Jul 19 12:38:14 managed-node2 conmon[26789]: conmon a8773b3857e3e0dd4e13 : winsz read side: 16, winsz write side: 17 Jul 19 12:38:14 managed-node2 conmon[26789]: conmon a8773b3857e3e0dd4e13 : container PID: 26799 Jul 19 12:38:14 managed-node2 platform-python[26557]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jul 19 12:38:14 managed-node2 platform-python[26557]: 
ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 0c3499cd78df9c3cbd4aa0aa8e91445278eb6157f70949cc299bc9c609a42f3c Container: a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 Jul 19 12:38:14 managed-node2 platform-python[26557]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-19T12:38:13-04:00" level=info msg="/bin/podman filtering at log level debug" time="2025-07-19T12:38:13-04:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-07-19T12:38:13-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-07-19T12:38:13-04:00" level=info msg="Using sqlite as database backend" time="2025-07-19T12:38:13-04:00" level=debug msg="Using graph driver overlay" time="2025-07-19T12:38:13-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2025-07-19T12:38:13-04:00" level=debug msg="Using run root /run/user/3001/containers" time="2025-07-19T12:38:13-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2025-07-19T12:38:13-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2025-07-19T12:38:13-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2025-07-19T12:38:13-04:00" level=debug msg="Using transient store: false" time="2025-07-19T12:38:13-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-19T12:38:13-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-19T12:38:13-04:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2025-07-19T12:38:13-04:00" level=debug msg="Cached value indicated that native-diff is usable" time="2025-07-19T12:38:13-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2025-07-19T12:38:13-04:00" level=debug msg="Initializing event backend file" time="2025-07-19T12:38:13-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-07-19T12:38:13-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-07-19T12:38:13-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-07-19T12:38:13-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-07-19T12:38:13-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-07-19T12:38:13-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-07-19T12:38:13-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-07-19T12:38:13-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" 
time="2025-07-19T12:38:13-04:00" level=info msg="Setting parallel job count to 7" time="2025-07-19T12:38:13-04:00" level=debug msg="Successfully loaded 1 networks" time="2025-07-19T12:38:13-04:00" level=debug msg="found free device name cni-podman1" time="2025-07-19T12:38:13-04:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2025-07-19T12:38:13-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-19 12:38:13.521272 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-07-19T12:38:13-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-07-19T12:38:13-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-19T12:38:13-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:13-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-07-19T12:38:13-04:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" does not resolve to an image ID" time="2025-07-19T12:38:13-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-07-19T12:38:13-04:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" does not resolve to an image ID" time="2025-07-19T12:38:13-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." 
time="2025-07-19T12:38:13-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:13-04:00" level=debug msg="FROM \"scratch\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2025-07-19T12:38:13-04:00" level=debug msg="Check for idmapped mounts support " time="2025-07-19T12:38:13-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:13-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:13-04:00" level=debug msg="overlay: test mount indicated that volatile is being used" time="2025-07-19T12:38:13-04:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345/work,userxattr,volatile,context=\"system_u:object_r:container_file_t:s0:c480,c514\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Container ID: ac8e6c0ad9d62a1134f2644b1390fd8fa36d22d0d6282cefc7edd95b4f95d64d" time="2025-07-19T12:38:13-04:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}" time="2025-07-19T12:38:13-04:00" level=debug msg="COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\"\", Src:[]string{\"/usr/libexec/podman/catatonit\"}, Dest:\"/catatonit\", Download:false, Chown:\"\", Chmod:\"\", Checksum:\"\", Files:[]imagebuilder.File(nil)}" time="2025-07-19T12:38:13-04:00" level=debug msg="added content file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd" time="2025-07-19T12:38:13-04:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT [\"/catatonit\", \"-P\"]}" time="2025-07-19T12:38:13-04:00" level=debug msg="COMMIT localhost/podman-pause:4.9.4-dev-1708535009" time="2025-07-19T12:38:13-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\"" time="2025-07-19T12:38:13-04:00" level=debug msg="COMMIT \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\"" time="2025-07-19T12:38:13-04:00" level=debug msg="committing image with reference \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" is allowed by policy" time="2025-07-19T12:38:13-04:00" level=debug msg="layer list: [\"340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345\"]" time="2025-07-19T12:38:13-04:00" level=debug msg="using \"/var/tmp/buildah2427832820\" to hold temporary data" time="2025-07-19T12:38:13-04:00" level=debug msg="Tar with 
options on /home/podman_basic_user/.local/share/containers/storage/overlay/340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345/diff" time="2025-07-19T12:38:13-04:00" level=debug msg="layer \"340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345\" size is 767488 bytes, uncompressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690, possibly-compressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690" time="2025-07-19T12:38:13-04:00" level=debug msg="OCIv1 config = {\"created\":\"2025-07-19T16:38:13.656892898Z\",\"architecture\":\"amd64\",\"os\":\"linux\",\"config\":{\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Entrypoint\":[\"/catatonit\",\"-P\"],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"]},\"history\":[{\"created\":\"2025-07-19T16:38:13.656345599Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-07-19T16:38:13.660597339Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-07-19T12:38:13-04:00" level=debug msg="OCIv1 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\":\"application/vnd.oci.image.config.v1+json\",\"digest\":\"sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\",\"size\":668},\"layers\":[{\"mediaType\":\"application/vnd.oci.image.layer.v1.tar\",\"digest\":\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\",\"size\":767488}],\"annotations\":{\"org.opencontainers.image.base.digest\":\"\",\"org.opencontainers.image.base.name\":\"\"}}" time="2025-07-19T12:38:13-04:00" level=debug msg="Docker v2s2 config = {\"created\":\"2025-07-19T16:38:13.656892898Z\",\"container\":\"ac8e6c0ad9d62a1134f2644b1390fd8fa36d22d0d6282cefc7edd95b4f95d64d\",\"container_config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":[],\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":[],\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"architecture\":\"amd64\",\"os\":\"linux\",\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"]},\"history\":[{\"created\":\"2025-07-19T16:38:13.656345599Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-07-19T16:38:13.660597339Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-07-19T12:38:13-04:00" level=debug msg="Docker v2s2 manifest = 
{\"schemaVersion\":2,\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\"config\":{\"mediaType\":\"application/vnd.docker.container.image.v1+json\",\"size\":1342,\"digest\":\"sha256:803cd64c1bc1a2e7297b3d5f520a915c581e4037aabac925fb21fc3ad8b279ee\"},\"layers\":[{\"mediaType\":\"application/vnd.docker.image.rootfs.diff.tar\",\"size\":767488,\"digest\":\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"}]}" time="2025-07-19T12:38:13-04:00" level=debug msg="Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite" time="2025-07-19T12:38:13-04:00" level=debug msg="IsRunningImageAllowed for image containers-storage:" time="2025-07-19T12:38:13-04:00" level=debug msg=" Using transport \"containers-storage\" policy section " time="2025-07-19T12:38:13-04:00" level=debug msg=" Requirement 0: allowed" time="2025-07-19T12:38:13-04:00" level=debug msg="Overall: allowed" time="2025-07-19T12:38:13-04:00" level=debug msg="start reading config" time="2025-07-19T12:38:13-04:00" level=debug msg="finished reading config" time="2025-07-19T12:38:13-04:00" level=debug msg="Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]" time="2025-07-19T12:38:13-04:00" level=debug msg="... will first try using the original manifest unmodified" time="2025-07-19T12:38:13-04:00" level=debug msg="Checking if we can reuse blob sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690: general substitution = true, compression for MIME type \"application/vnd.oci.image.layer.v1.tar\" = true" time="2025-07-19T12:38:13-04:00" level=debug msg="reading layer \"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"" time="2025-07-19T12:38:13-04:00" level=debug msg="No compression detected" time="2025-07-19T12:38:13-04:00" level=debug msg="Using original blob without modification" time="2025-07-19T12:38:13-04:00" level=debug msg="Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690/diff" time="2025-07-19T12:38:13-04:00" level=debug msg="finished reading layer \"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"" time="2025-07-19T12:38:13-04:00" level=debug msg="No compression detected" time="2025-07-19T12:38:13-04:00" level=debug msg="Compression change for blob sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63 (\"application/vnd.oci.image.config.v1+json\") not supported" time="2025-07-19T12:38:13-04:00" level=debug msg="Using original blob without modification" time="2025-07-19T12:38:13-04:00" level=debug msg="setting image creation date to 2025-07-19 16:38:13.656892898 +0000 UTC" time="2025-07-19T12:38:13-04:00" level=debug msg="created new image ID \"9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\" with metadata \"{}\"" time="2025-07-19T12:38:13-04:00" level=debug msg="added name \"localhost/podman-pause:4.9.4-dev-1708535009\" to image \"9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\"" time="2025-07-19T12:38:13-04:00" level=debug msg="parsed reference into 
\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\"" time="2025-07-19T12:38:13-04:00" level=debug msg="printing final image id \"9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Pod using bridge network mode" time="2025-07-19T12:38:13-04:00" level=debug msg="Got pod cgroup as /libpod_parent/0c3499cd78df9c3cbd4aa0aa8e91445278eb6157f70949cc299bc9c609a42f3c" time="2025-07-19T12:38:13-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-19T12:38:13-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:13-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-07-19T12:38:13-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-19T12:38:13-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63)" time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Inspecting image 9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63" time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Inspecting image 9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63" time="2025-07-19T12:38:13-04:00" level=debug msg="Inspecting image 9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63" time="2025-07-19T12:38:13-04:00" level=debug msg="using systemd mode: false" time="2025-07-19T12:38:13-04:00" level=debug msg="setting container name 0c3499cd78df-infra" time="2025-07-19T12:38:13-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Allocated lock 1 for container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c" time="2025-07-19T12:38:13-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\"" time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Created container \"f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Container \"f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\" has work directory 
\"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Container \"f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\" has run directory \"/run/user/3001/containers/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:13-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:13-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-19T12:38:13-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:13-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-07-19T12:38:13-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:13-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:13-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-19T12:38:13-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:13-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:13-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:13-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-07-19T12:38:13-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:13-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:13-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:13-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-19T12:38:13-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:13-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-19T12:38:13-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-19T12:38:13-04:00" level=debug msg="using systemd mode: false" time="2025-07-19T12:38:13-04:00" level=debug msg="adding container to pod httpd1" time="2025-07-19T12:38:13-04:00" level=debug msg="setting container name httpd1-httpd1" 
time="2025-07-19T12:38:13-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-07-19T12:38:13-04:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-07-19T12:38:13-04:00" level=debug msg="Adding mount /proc" time="2025-07-19T12:38:13-04:00" level=debug msg="Adding mount /dev" time="2025-07-19T12:38:13-04:00" level=debug msg="Adding mount /dev/pts" time="2025-07-19T12:38:13-04:00" level=debug msg="Adding mount /dev/mqueue" time="2025-07-19T12:38:13-04:00" level=debug msg="Adding mount /sys" time="2025-07-19T12:38:13-04:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-07-19T12:38:13-04:00" level=debug msg="Allocated lock 2 for container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458" time="2025-07-19T12:38:13-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Created container \"a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Container \"a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Container \"a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\" has run directory \"/run/user/3001/containers/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Strongconnecting node f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c" time="2025-07-19T12:38:13-04:00" level=debug msg="Pushed f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c onto stack" time="2025-07-19T12:38:13-04:00" level=debug msg="Finishing node f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c. Popped f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c off stack" time="2025-07-19T12:38:13-04:00" level=debug msg="Strongconnecting node a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458" time="2025-07-19T12:38:13-04:00" level=debug msg="Pushed a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 onto stack" time="2025-07-19T12:38:13-04:00" level=debug msg="Finishing node a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458. 
Popped a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 off stack" time="2025-07-19T12:38:13-04:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/T3ZNBLNG2W7D2UELJU7O7YZ76X,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/127ba02fe95229d135be77cff068ce7d066eb98b87e356cce8c34501e113568e/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/127ba02fe95229d135be77cff068ce7d066eb98b87e356cce8c34501e113568e/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c330,c361\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Mounted container \"f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/127ba02fe95229d135be77cff068ce7d066eb98b87e356cce8c34501e113568e/merged\"" time="2025-07-19T12:38:13-04:00" level=debug msg="Created root filesystem for container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c at /home/podman_basic_user/.local/share/containers/storage/overlay/127ba02fe95229d135be77cff068ce7d066eb98b87e356cce8c34501e113568e/merged" time="2025-07-19T12:38:13-04:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-06b3cc7d-4137-7077-edbe-bd7530bc2101 for container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c" time="2025-07-19T12:38:13-04:00" level=debug msg="creating rootless network namespace with name \"rootless-netns-d22c9f230d0691b8f418\"" time="2025-07-19T12:38:13-04:00" level=debug msg="slirp4netns command: /bin/slirp4netns --disable-host-loopback --mtu=65520 --enable-sandbox --enable-seccomp --enable-ipv6 -c -r 3 --netns-type=path /run/user/3001/netns/rootless-netns-d22c9f230d0691b8f418 tap0" time="2025-07-19T12:38:13-04:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" time="2025-07-19T12:38:14-04:00" level=debug msg="cni result for container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:76:19:76:c8:78:b3 Sandbox:} {Name:vetha780888b Mac:f2:ee:6b:fd:41:a0 Sandbox:} {Name:eth0 Mac:2e:67:99:01:50:2a Sandbox:/run/user/3001/netns/netns-06b3cc7d-4137-7077-edbe-bd7530bc2101}] [{Version:4 Interface:0xc000c00b08 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}" time="2025-07-19T12:38:14-04:00" level=debug msg="rootlessport: time=\"2025-07-19T12:38:14-04:00\" level=info msg=\"Starting parent driver\"\n" time="2025-07-19T12:38:14-04:00" level=debug msg="rootlessport: time=\"2025-07-19T12:38:14-04:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport2421233428/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport2421233428/.bp.sock]\"\n" time="2025-07-19T12:38:14-04:00" level=debug msg="rootlessport: time=\"2025-07-19T12:38:14-04:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2025-07-19T12:38:14-04:00" level=debug msg="rootlessport: time=\"2025-07-19T12:38:14-04:00\" level=info msg=\"Waiting for initComplete\"\n" time="2025-07-19T12:38:14-04:00" level=debug msg="rootlessport: time=\"2025-07-19T12:38:14-04:00\" level=info msg=\"initComplete is closed; parent and child established the communication channel\"\n" time="2025-07-19T12:38:14-04:00" level=debug msg="rootlessport: 
time=\"2025-07-19T12:38:14-04:00\" level=info msg=\"Exposing ports [{ 80 15001 1 tcp}]\"\n" time="2025-07-19T12:38:14-04:00" level=debug msg="rootlessport is ready" time="2025-07-19T12:38:14-04:00" level=debug msg="rootlessport: time=\"2025-07-19T12:38:14-04:00\" level=info msg=Ready\n" time="2025-07-19T12:38:14-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2025-07-19T12:38:14-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-07-19T12:38:14-04:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay/127ba02fe95229d135be77cff068ce7d066eb98b87e356cce8c34501e113568e/merged\"" time="2025-07-19T12:38:14-04:00" level=debug msg="Created OCI spec for container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata/config.json" time="2025-07-19T12:38:14-04:00" level=debug msg="Got pod cgroup as " time="2025-07-19T12:38:14-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-07-19T12:38:14-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c -u f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata -p /run/user/3001/containers/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata/pidfile -n 0c3499cd78df-infra --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c]" time="2025-07-19T12:38:14-04:00" level=info msg="Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for blkio: mkdir /sys/fs/cgroup/blkio/libpod_parent: permission denied" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied 
time="2025-07-19T12:38:14-04:00" level=debug msg="Received: 26778" time="2025-07-19T12:38:14-04:00" level=info msg="Got Conmon PID as 26768" time="2025-07-19T12:38:14-04:00" level=debug msg="Created container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c in OCI runtime" time="2025-07-19T12:38:14-04:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-07-19T12:38:14-04:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-07-19T12:38:14-04:00" level=debug msg="Starting container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c with command [/catatonit -P]" time="2025-07-19T12:38:14-04:00" level=debug msg="Started container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c" time="2025-07-19T12:38:14-04:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/Q6SNT2SFVF32LFZYXFZFNM34JV,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/30ccda56ccc67908370dcb7e56c6e44595e70d6af5e179ee106b4a3d90b31e0f/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/30ccda56ccc67908370dcb7e56c6e44595e70d6af5e179ee106b4a3d90b31e0f/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c330,c361\"" time="2025-07-19T12:38:14-04:00" level=debug msg="Mounted container \"a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/30ccda56ccc67908370dcb7e56c6e44595e70d6af5e179ee106b4a3d90b31e0f/merged\"" time="2025-07-19T12:38:14-04:00" level=debug msg="Created root filesystem for container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 at /home/podman_basic_user/.local/share/containers/storage/overlay/30ccda56ccc67908370dcb7e56c6e44595e70d6af5e179ee106b4a3d90b31e0f/merged" time="2025-07-19T12:38:14-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2025-07-19T12:38:14-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-07-19T12:38:14-04:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-07-19T12:38:14-04:00" level=debug msg="Created OCI spec for container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata/config.json" time="2025-07-19T12:38:14-04:00" level=debug msg="Got pod cgroup as " time="2025-07-19T12:38:14-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-07-19T12:38:14-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 -u a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata -p /run/user/3001/containers/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata/ctr.log --log-level debug --syslog 
--conmon-pidfile /run/user/3001/containers/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458]" time="2025-07-19T12:38:14-04:00" level=info msg="Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for cpu: mkdir /sys/fs/cgroup/cpu/conmon: permission denied" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-07-19T12:38:14-04:00" level=debug msg="Received: 26799" time="2025-07-19T12:38:14-04:00" level=info msg="Got Conmon PID as 26789" time="2025-07-19T12:38:14-04:00" level=debug msg="Created container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 in OCI runtime" time="2025-07-19T12:38:14-04:00" level=debug msg="Starting container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 with command [/bin/busybox-extras httpd -f -p 80]" time="2025-07-19T12:38:14-04:00" level=debug msg="Started container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458" time="2025-07-19T12:38:14-04:00" level=debug msg="Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-07-19T12:38:14-04:00" level=debug msg="Shutting down engines" Jul 19 12:38:14 managed-node2 platform-python[26557]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jul 19 12:38:14 managed-node2 sudo[26554]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:38:14 managed-node2 sudo[26930]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lmrmjlkkbsynluhmnwsuczlkvrvxmsws ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943094.4993763-15928-49906019321868/AnsiballZ_systemd.py' Jul 19 12:38:14 managed-node2 sudo[26930]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:38:14 managed-node2 platform-python[26933]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Jul 19 12:38:14 managed-node2 systemd[25528]: Reloading. 
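[Note: after play kube returns rc 0, the role reloads the rootless user's systemd manager via become, exporting XDG_RUNTIME_DIR so systemctl can reach the per-user D-Bus instance. The manual equivalent, mirroring the sudo/sh -c wrapper visible in the journal above, would be roughly:]

    sudo -u podman_basic_user /bin/sh -c 'XDG_RUNTIME_DIR=/run/user/3001 systemctl --user daemon-reload'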
Jul 19 12:38:14 managed-node2 sudo[26930]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:38:15 managed-node2 dnsmasq[26754]: listening on cni-podman1(#3): fe80::7419:76ff:fec8:78b3%cni-podman1 Jul 19 12:38:15 managed-node2 sudo[27068]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gajwmakwkbshdhvfjxukfbkcepplsltd ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943095.0683737-15953-10632554991967/AnsiballZ_systemd.py' Jul 19 12:38:15 managed-node2 sudo[27068]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:38:15 managed-node2 platform-python[27071]: ansible-systemd Invoked with name= scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None Jul 19 12:38:15 managed-node2 systemd[25528]: Reloading. Jul 19 12:38:15 managed-node2 sudo[27068]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:38:15 managed-node2 sudo[27207]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dcmefbhxtsrhgtubtgzkfqommjfyuibn ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943095.7285938-15986-176076416334674/AnsiballZ_systemd.py' Jul 19 12:38:15 managed-node2 sudo[27207]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:38:16 managed-node2 platform-python[27210]: ansible-systemd Invoked with name= scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None Jul 19 12:38:16 managed-node2 systemd[25528]: Created slice podman\x2dkube.slice. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:38:16 managed-node2 systemd[25528]: Starting A template for running K8s workloads via podman-kube-play... -- Subject: Unit UNIT has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun starting up. 
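[Note: the two ansible-systemd calls with scope=user (enabled=True, then state=started) enable and start the generated unit. The unit name is masked in the journal (name=), but the "A template for running K8s workloads via podman-kube-play" description matches podman's podman-kube@.service template, whose instance name is the systemd-escaped path of the kube YAML. A hypothetical reconstruction, using the httpd1.yml path that appears in the play kube command above:]

    # Speculative unit name -- the journal redacts the real one:
    unit="podman-kube@$(systemd-escape /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml).service"
    sudo -u podman_basic_user /bin/sh -c "XDG_RUNTIME_DIR=/run/user/3001 systemctl --user enable --now '$unit'"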
Jul 19 12:38:16 managed-node2 conmon[26789]: conmon a8773b3857e3e0dd4e13 : container 26799 exited with status 137 Jul 19 12:38:16 managed-node2 conmon[26768]: conmon f153d4517c8778d9470c : container 26778 exited with status 137 Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458)" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=info msg="Using sqlite as database backend" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using graph driver overlay" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using run root /run/user/3001/containers" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using transient store: false" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Cached value indicated that metacopy is not being used" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Cached value indicated that native-diff is usable" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Initializing event backend file" Jul 19 12:38:16 managed-node2 
/usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=info msg="Setting parallel job count to 7" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c)" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=info msg="Using sqlite as database backend" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using graph driver overlay" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using run root /run/user/3001/containers" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jul 19 12:38:16 
managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using transient store: false" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Cached value indicated that metacopy is not being used" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Cached value indicated that native-diff is usable" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Initializing event backend file" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=info msg="Setting parallel job count to 7" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman 
--root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458)" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Shutting down engines" Jul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered disabled state Jul 19 12:38:16 managed-node2 kernel: device vetha780888b left promiscuous mode Jul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered disabled state Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c)" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Shutting down engines" Jul 19 12:38:16 managed-node2 podman[27216]: Pods stopped: Jul 19 12:38:16 managed-node2 podman[27216]: 0c3499cd78df9c3cbd4aa0aa8e91445278eb6157f70949cc299bc9c609a42f3c Jul 19 12:38:16 managed-node2 podman[27216]: Pods removed: Jul 19 12:38:16 managed-node2 podman[27216]: 0c3499cd78df9c3cbd4aa0aa8e91445278eb6157f70949cc299bc9c609a42f3c Jul 19 12:38:16 managed-node2 podman[27216]: Secrets removed: Jul 19 12:38:16 managed-node2 podman[27216]: Volumes removed: Jul 19 12:38:16 managed-node2 systemd[25528]: Started rootless-netns-1ff27aec.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
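[Note: starting the unit first tears down the pod created by the earlier ad-hoc play ("Pods stopped" / "Pods removed": 0c3499cd78df...), then replays the YAML under systemd, which is why the two containers above exit with status 137 (SIGKILL from the stop) and fresh pod/container IDs appear below. A sketch of confirming the replacement, run as the rootless user:]

    sudo -u podman_basic_user /bin/sh -c "XDG_RUNTIME_DIR=/run/user/3001 podman pod ps --format '{{.Name}} {{.Status}}'"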
Jul 19 12:38:16 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth24653eaf: link is not ready Jul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered blocking state Jul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered disabled state Jul 19 12:38:16 managed-node2 kernel: device veth24653eaf entered promiscuous mode Jul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered blocking state Jul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered forwarding state Jul 19 12:38:16 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth24653eaf: link becomes ready Jul 19 12:38:16 managed-node2 dnsmasq[27465]: listening on cni-podman1(#3): 10.89.0.1 Jul 19 12:38:16 managed-node2 dnsmasq[27467]: started, version 2.79 cachesize 150 Jul 19 12:38:16 managed-node2 dnsmasq[27467]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify Jul 19 12:38:16 managed-node2 dnsmasq[27467]: using local addresses only for domain dns.podman Jul 19 12:38:16 managed-node2 dnsmasq[27467]: reading /etc/resolv.conf Jul 19 12:38:16 managed-node2 dnsmasq[27467]: using local addresses only for domain dns.podman Jul 19 12:38:16 managed-node2 dnsmasq[27467]: using nameserver 10.0.2.3#53 Jul 19 12:38:16 managed-node2 dnsmasq[27467]: using nameserver 10.29.169.13#53 Jul 19 12:38:16 managed-node2 dnsmasq[27467]: using nameserver 10.29.170.12#53 Jul 19 12:38:16 managed-node2 dnsmasq[27467]: using nameserver 10.2.32.1#53 Jul 19 12:38:16 managed-node2 dnsmasq[27467]: read /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Jul 19 12:38:17 managed-node2 podman[27216]: Pod: Jul 19 12:38:17 managed-node2 podman[27216]: 2d1d21944d4ca561d0a237f029182068a925bb49513d0d920c7ad527b530a87a Jul 19 12:38:17 managed-node2 podman[27216]: Container: Jul 19 12:38:17 managed-node2 podman[27216]: fbdb7144dbaf3a0b80484872c9bcae1ed8f6a793661386bc91aa084464c69027 Jul 19 12:38:17 managed-node2 systemd[25528]: Started A template for running K8s workloads via podman-kube-play. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
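[Note: the dnsmasq records come from the CNI dnsname plugin: it runs a dnsmasq instance bound to the bridge gateway (10.89.0.1) serving the dns.podman domain, with upstream nameservers copied from /etc/resolv.conf, so containers attached to podman-default-kube-network can resolve each other by name. One way to inspect the network definition backing this:]

    sudo -u podman_basic_user /bin/sh -c 'XDG_RUNTIME_DIR=/run/user/3001 podman network inspect podman-default-kube-network'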
Jul 19 12:38:17 managed-node2 sudo[27207]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:38:17 managed-node2 platform-python[27643]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 19 12:38:18 managed-node2 platform-python[27767]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:38:18 managed-node2 dnsmasq[27467]: listening on cni-podman1(#3): fe80::f826:e2ff:fec6:eea3%cni-podman1 Jul 19 12:38:19 managed-node2 platform-python[27892]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:20 managed-node2 platform-python[28016]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:38:21 managed-node2 platform-python[28139]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:38:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 19 12:38:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Jul 19 12:38:22 managed-node2 platform-python[28430]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:38:23 managed-node2 platform-python[28553]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:38:23 managed-node2 platform-python[28676]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 19 12:38:23 managed-node2 platform-python[28775]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752943103.1443834-16356-64021954424990/source _original_basename=tmp0hh2oj3u follow=False checksum=b06d991e561d2233cf906d852db9b578dc61ce26 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Jul 19 12:38:24 managed-node2 platform-python[28900]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:38:24 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice. -- Subject: Unit machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice has finished starting up. -- -- The start-up result is done. Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.3685] manager: (cni-podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.3718] manager: (vethf4165b4a): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Jul 19 12:38:24 managed-node2 systemd-udevd[28949]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. Jul 19 12:38:24 managed-node2 systemd-udevd[28949]: Could not generate persistent MAC address for vethf4165b4a: No such file or directory Jul 19 12:38:24 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vethf4165b4a: link is not ready Jul 19 12:38:24 managed-node2 systemd-udevd[28948]: Using default interface naming scheme 'rhel-8.0'. 
Jul 19 12:38:24 managed-node2 systemd-udevd[28948]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. Jul 19 12:38:24 managed-node2 systemd-udevd[28948]: Could not generate persistent MAC address for cni-podman1: No such file or directory Jul 19 12:38:24 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered blocking state Jul 19 12:38:24 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered disabled state Jul 19 12:38:24 managed-node2 kernel: device vethf4165b4a entered promiscuous mode Jul 19 12:38:24 managed-node2 dbus-daemon[591]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.5' (uid=0 pid=661 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0") Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4140] device (cni-podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', sys-iface-state: 'external') Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4145] device (cni-podman1): state change: unavailable -> disconnected (reason 'connection-assumed', sys-iface-state: 'external') Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4153] device (cni-podman1): Activation: starting connection 'cni-podman1' (288926ec-c137-47aa-80eb-b1812c1bfed2) Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4154] device (cni-podman1): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'external') Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4157] device (cni-podman1): state change: prepare -> config (reason 'none', sys-iface-state: 'external') Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4159] device (cni-podman1): state change: config -> ip-config (reason 'none', sys-iface-state: 'external') Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4160] device (cni-podman1): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'external') Jul 19 12:38:24 managed-node2 systemd[1]: Starting Network Manager Script Dispatcher Service... -- Subject: Unit NetworkManager-dispatcher.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has begun starting up. Jul 19 12:38:24 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready Jul 19 12:38:24 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready Jul 19 12:38:24 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethf4165b4a: link becomes ready Jul 19 12:38:24 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered blocking state Jul 19 12:38:24 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered forwarding state Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4643] device (vethf4165b4a): carrier: link connected Jul 19 12:38:24 managed-node2 dbus-daemon[591]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher' Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4647] device (cni-podman1): carrier: link connected Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4663] device (cni-podman1): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'external') Jul 19 12:38:24 managed-node2 systemd[1]: Started Network Manager Script Dispatcher Service. 
-- Subject: Unit NetworkManager-dispatcher.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has finished starting up. -- -- The start-up result is done. Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4665] device (cni-podman1): state change: secondaries -> activated (reason 'none', sys-iface-state: 'external') Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4669] device (cni-podman1): Activation: successful, device activated. Jul 19 12:38:24 managed-node2 dnsmasq[29070]: listening on cni-podman1(#3): 10.89.0.1 Jul 19 12:38:24 managed-node2 dnsmasq[29074]: started, version 2.79 cachesize 150 Jul 19 12:38:24 managed-node2 dnsmasq[29074]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify Jul 19 12:38:24 managed-node2 dnsmasq[29074]: using local addresses only for domain dns.podman Jul 19 12:38:24 managed-node2 dnsmasq[29074]: reading /etc/resolv.conf Jul 19 12:38:24 managed-node2 dnsmasq[29074]: using local addresses only for domain dns.podman Jul 19 12:38:24 managed-node2 dnsmasq[29074]: using nameserver 10.29.169.13#53 Jul 19 12:38:24 managed-node2 dnsmasq[29074]: using nameserver 10.29.170.12#53 Jul 19 12:38:24 managed-node2 dnsmasq[29074]: using nameserver 10.2.32.1#53 Jul 19 12:38:24 managed-node2 dnsmasq[29074]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Jul 19 12:38:24 managed-node2 systemd[1]: Started libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope. -- Subject: Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:24 managed-node2 conmon[29076]: conmon c67c85c8cb2aae9cc679 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Jul 19 12:38:24 managed-node2 conmon[29076]: conmon c67c85c8cb2aae9cc679 : terminal_ctrl_fd: 13 Jul 19 12:38:24 managed-node2 conmon[29076]: conmon c67c85c8cb2aae9cc679 : winsz read side: 17, winsz write side: 18 Jul 19 12:38:24 managed-node2 systemd[1]: Started libcontainer container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c. -- Subject: Unit libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:24 managed-node2 conmon[29076]: conmon c67c85c8cb2aae9cc679 : container PID: 29087 Jul 19 12:38:24 managed-node2 systemd[1]: Started libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope. -- Subject: Unit libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has finished starting up. -- -- The start-up result is done. 
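[Note: NetworkManager notices the CNI-created cni-podman1 bridge and veth, assumes an external connection for them (sys-iface-state: 'external'), and walks the devices through its state machine without actually managing their addressing; these records are informational, not interference with podman. The device view can be checked with:]

    # cni-podman1 typically shows as connected (externally) while the pod network exists:
    nmcli device status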
Jul 19 12:38:24 managed-node2 conmon[29098]: conmon add9038e274085c173b1 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach} Jul 19 12:38:24 managed-node2 conmon[29098]: conmon add9038e274085c173b1 : terminal_ctrl_fd: 12 Jul 19 12:38:24 managed-node2 conmon[29098]: conmon add9038e274085c173b1 : winsz read side: 16, winsz write side: 17 Jul 19 12:38:24 managed-node2 systemd[1]: Started libcontainer container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f. -- Subject: Unit libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:24 managed-node2 conmon[29098]: conmon add9038e274085c173b1 : container PID: 29108 Jul 19 12:38:24 managed-node2 platform-python[28900]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 19 12:38:24 managed-node2 platform-python[28900]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816 Container: add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f Jul 19 12:38:24 managed-node2 platform-python[28900]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-19T12:38:24-04:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2025-07-19T12:38:24-04:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-07-19T12:38:24-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-07-19T12:38:24-04:00" level=info msg="Using sqlite as database backend" time="2025-07-19T12:38:24-04:00" level=debug msg="Using graph driver overlay" time="2025-07-19T12:38:24-04:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Using run root /run/containers/storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2025-07-19T12:38:24-04:00" level=debug msg="Using tmp dir /run/libpod" time="2025-07-19T12:38:24-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2025-07-19T12:38:24-04:00" level=debug msg="Using transient store: false" time="2025-07-19T12:38:24-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-19T12:38:24-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-19T12:38:24-04:00" level=debug msg="Cached value indicated that metacopy is being used" time="2025-07-19T12:38:24-04:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2025-07-19T12:38:24-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2025-07-19T12:38:24-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2025-07-19T12:38:24-04:00" level=debug msg="Initializing event backend file" time="2025-07-19T12:38:24-04:00" 
level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-07-19T12:38:24-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-07-19T12:38:24-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-07-19T12:38:24-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-07-19T12:38:24-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-07-19T12:38:24-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-07-19T12:38:24-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-07-19T12:38:24-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" time="2025-07-19T12:38:24-04:00" level=info msg="Setting parallel job count to 7" time="2025-07-19T12:38:24-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-19 12:35:55.640649556 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-07-19T12:38:24-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-07-19T12:38:24-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:24-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." 
time="2025-07-19T12:38:24-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a)" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Pod using bridge network mode" time="2025-07-19T12:38:24-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice for parent machine.slice and name libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816" time="2025-07-19T12:38:24-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice" time="2025-07-19T12:38:24-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice" time="2025-07-19T12:38:24-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:24-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." 
time="2025-07-19T12:38:24-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a)" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Inspecting image fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Inspecting image fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a" time="2025-07-19T12:38:24-04:00" level=debug msg="Inspecting image fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a" time="2025-07-19T12:38:24-04:00" level=debug msg="using systemd mode: false" time="2025-07-19T12:38:24-04:00" level=debug msg="setting container name f8000a88fe4a-infra" time="2025-07-19T12:38:24-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Allocated lock 1 for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c" time="2025-07-19T12:38:24-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2025-07-19T12:38:24-04:00" level=debug msg="Check for idmapped mounts support " time="2025-07-19T12:38:24-04:00" level=debug msg="Created container \"c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Container \"c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\" has work directory \"/var/lib/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Container \"c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\" has run directory \"/run/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:24-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" 
..." time="2025-07-19T12:38:24-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-07-19T12:38:24-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:24-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-19T12:38:24-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:24-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-07-19T12:38:24-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:24-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-19T12:38:24-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-19T12:38:24-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-19T12:38:24-04:00" level=debug msg="using systemd mode: false" time="2025-07-19T12:38:24-04:00" level=debug msg="adding container to pod httpd2" time="2025-07-19T12:38:24-04:00" level=debug msg="setting container name httpd2-httpd2" 
time="2025-07-19T12:38:24-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-07-19T12:38:24-04:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-07-19T12:38:24-04:00" level=debug msg="Adding mount /proc" time="2025-07-19T12:38:24-04:00" level=debug msg="Adding mount /dev" time="2025-07-19T12:38:24-04:00" level=debug msg="Adding mount /dev/pts" time="2025-07-19T12:38:24-04:00" level=debug msg="Adding mount /dev/mqueue" time="2025-07-19T12:38:24-04:00" level=debug msg="Adding mount /sys" time="2025-07-19T12:38:24-04:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-07-19T12:38:24-04:00" level=debug msg="Allocated lock 2 for container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f" time="2025-07-19T12:38:24-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Created container \"add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Container \"add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\" has work directory \"/var/lib/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Container \"add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\" has run directory \"/run/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Strongconnecting node c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c" time="2025-07-19T12:38:24-04:00" level=debug msg="Pushed c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c onto stack" time="2025-07-19T12:38:24-04:00" level=debug msg="Finishing node c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c. Popped c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c off stack" time="2025-07-19T12:38:24-04:00" level=debug msg="Strongconnecting node add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f" time="2025-07-19T12:38:24-04:00" level=debug msg="Pushed add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f onto stack" time="2025-07-19T12:38:24-04:00" level=debug msg="Finishing node add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f. 
Popped add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f off stack" time="2025-07-19T12:38:24-04:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/HXWSEHVDVE6HABOKZ6B2SSNLKD,upperdir=/var/lib/containers/storage/overlay/6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e/diff,workdir=/var/lib/containers/storage/overlay/6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c723,c1018\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Mounted container \"c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\" at \"/var/lib/containers/storage/overlay/6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e/merged\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Created root filesystem for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c at /var/lib/containers/storage/overlay/6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e/merged" time="2025-07-19T12:38:24-04:00" level=debug msg="Made network namespace at /run/netns/netns-f67fee73-2bbe-5ce9-31b0-8129b0eb7f47 for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c" time="2025-07-19T12:38:24-04:00" level=debug msg="cni result for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:ee:2d:d5:97:9a:6b Sandbox:} {Name:vethf4165b4a Mac:d2:23:54:53:0f:5f Sandbox:} {Name:eth0 Mac:ea:eb:9c:fe:80:d8 Sandbox:/run/netns/netns-f67fee73-2bbe-5ce9-31b0-8129b0eb7f47}] [{Version:4 Interface:0xc0005a9428 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}" time="2025-07-19T12:38:24-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2025-07-19T12:38:24-04:00" level=debug msg="Setting Cgroups for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c to machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice:libpod:c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c" time="2025-07-19T12:38:24-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-07-19T12:38:24-04:00" level=debug msg="Workdir \"/\" resolved to host path \"/var/lib/containers/storage/overlay/6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e/merged\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Created OCI spec for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c at /var/lib/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata/config.json" time="2025-07-19T12:38:24-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice for parent machine.slice and name libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816" time="2025-07-19T12:38:24-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice" time="2025-07-19T12:38:24-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice" time="2025-07-19T12:38:24-04:00" level=debug msg="/usr/bin/conmon messages will be logged 
to syslog" time="2025-07-19T12:38:24-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c -u c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata -p /run/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata/pidfile -n f8000a88fe4a-infra --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c]" time="2025-07-19T12:38:24-04:00" level=info msg="Running conmon under slice machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice and unitName libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope" time="2025-07-19T12:38:24-04:00" level=debug msg="Received: 29087" time="2025-07-19T12:38:24-04:00" level=info msg="Got Conmon PID as 29076" time="2025-07-19T12:38:24-04:00" level=debug msg="Created container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c in OCI runtime" time="2025-07-19T12:38:24-04:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-07-19T12:38:24-04:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-07-19T12:38:24-04:00" level=debug msg="Starting container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c with command [/catatonit -P]" time="2025-07-19T12:38:24-04:00" level=debug msg="Started container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c" time="2025-07-19T12:38:24-04:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/HJUTFIUMULI3FBOA3A6VGXTPPL,upperdir=/var/lib/containers/storage/overlay/ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45/diff,workdir=/var/lib/containers/storage/overlay/ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c723,c1018\"" 
time="2025-07-19T12:38:24-04:00" level=debug msg="Mounted container \"add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\" at \"/var/lib/containers/storage/overlay/ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45/merged\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Created root filesystem for container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f at /var/lib/containers/storage/overlay/ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45/merged" time="2025-07-19T12:38:24-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2025-07-19T12:38:24-04:00" level=debug msg="Setting Cgroups for container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f to machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice:libpod:add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f" time="2025-07-19T12:38:24-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-07-19T12:38:24-04:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-07-19T12:38:24-04:00" level=debug msg="Created OCI spec for container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f at /var/lib/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata/config.json" time="2025-07-19T12:38:24-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice for parent machine.slice and name libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816" time="2025-07-19T12:38:24-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice" time="2025-07-19T12:38:24-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice" time="2025-07-19T12:38:24-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-07-19T12:38:24-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f -u add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata -p /run/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg 
--volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f]" time="2025-07-19T12:38:24-04:00" level=info msg="Running conmon under slice machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice and unitName libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope" time="2025-07-19T12:38:24-04:00" level=debug msg="Received: 29108" time="2025-07-19T12:38:24-04:00" level=info msg="Got Conmon PID as 29098" time="2025-07-19T12:38:24-04:00" level=debug msg="Created container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f in OCI runtime" time="2025-07-19T12:38:24-04:00" level=debug msg="Starting container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f with command [/bin/busybox-extras httpd -f -p 80]" time="2025-07-19T12:38:24-04:00" level=debug msg="Started container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f" time="2025-07-19T12:38:24-04:00" level=debug msg="Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-07-19T12:38:24-04:00" level=debug msg="Shutting down engines" Jul 19 12:38:24 managed-node2 platform-python[28900]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jul 19 12:38:25 managed-node2 platform-python[29239]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Jul 19 12:38:25 managed-node2 systemd[1]: Reloading. Jul 19 12:38:25 managed-node2 dnsmasq[29074]: listening on cni-podman1(#3): fe80::ec2d:d5ff:fe97:9a6b%cni-podman1 Jul 19 12:38:26 managed-node2 platform-python[29400]: ansible-systemd Invoked with name= scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None Jul 19 12:38:26 managed-node2 systemd[1]: Reloading. Jul 19 12:38:26 managed-node2 platform-python[29563]: ansible-systemd Invoked with name= scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None Jul 19 12:38:26 managed-node2 systemd[1]: Created slice system-podman\x2dkube.slice. -- Subject: Unit system-podman\x2dkube.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit system-podman\x2dkube.slice has finished starting up. -- -- The start-up result is done. Jul 19 12:38:26 managed-node2 systemd[1]: Starting A template for running K8s workloads via podman-kube-play... -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun starting up. 
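The service systemd starts here is the podman-kube@.service template, instantiated with the systemd-escaped path of the kube file ("/" becomes "-", a literal "-" becomes \x2d, hence -etc-containers-ansible\x2dkubernetes.d-httpd2.yml). The three ansible-systemd invocations above (daemon_reload, then enabled, then started) collapse into something like the sketch below; the unit name is read straight from the journal, the single-task layout is an assumption:

- name: Enable and start the kube play service (sketch)
  ansible.builtin.systemd:
    name: "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service"
    scope: system
    daemon_reload: true
    enabled: true
    state: started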
Jul 19 12:38:26 managed-node2 conmon[29076]: conmon c67c85c8cb2aae9cc679 : container 29087 exited with status 137 Jul 19 12:38:26 managed-node2 systemd[1]: libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has successfully entered the 'dead' state. Jul 19 12:38:26 managed-node2 systemd[1]: libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope: Consumed 32ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope completed and consumed the indicated resources. Jul 19 12:38:26 managed-node2 conmon[29098]: conmon add9038e274085c173b1 : container 29108 exited with status 137 Jul 19 12:38:26 managed-node2 systemd[1]: libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has successfully entered the 'dead' state. Jul 19 12:38:26 managed-node2 systemd[1]: libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope: Consumed 33ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope completed and consumed the indicated resources. Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c)" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=info msg="Using sqlite as database backend" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f)" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Setting 
custom database backend: \"sqlite\"" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=info msg="Using sqlite as database backend" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using graph driver overlay" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using graph root /var/lib/containers/storage" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using run root /run/containers/storage" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using tmp dir /run/libpod" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using transient store: false" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Cached value indicated that metacopy is being used" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Cached value indicated that native-diff is not being used" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Initializing event backend file" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: 
invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=info msg="Setting parallel job count to 7" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using graph driver overlay" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using graph root /var/lib/containers/storage" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using run root /run/containers/storage" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using tmp dir /run/libpod" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using transient store: false" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Cached value indicated that metacopy is being used" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Cached value indicated that native-diff is not being used" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Initializing event backend file" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jul 19 
12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=info msg="Setting parallel job count to 7" Jul 19 12:38:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay-ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45-merged.mount has successfully entered the 'dead' state. Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f)" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Shutting down engines" Jul 19 12:38:26 managed-node2 systemd[1]: libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has successfully entered the 'dead' state. 
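Both containers exit with status 137 (128 + SIGKILL) as the newly started podman-kube@ service replaces the pod that podman_play had created ad hoc; conmon then fires the --exit-command captured earlier, which is why a second `podman container cleanup` debug stream appears and the *-merged.mount units enter the dead state. The cleanup step can also be run by hand against a container that exited uncleanly; a hedged sketch with the container ID from the log (normally conmon does this, and it fails on a container that is already cleaned up):

- name: Manually run podman's per-container cleanup (sketch)
  ansible.builtin.command:
    cmd: podman container cleanup add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f
  changed_when: false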
Jul 19 12:38:26 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered disabled state Jul 19 12:38:26 managed-node2 kernel: device vethf4165b4a left promiscuous mode Jul 19 12:38:26 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered disabled state Jul 19 12:38:26 managed-node2 systemd[1]: run-netns-netns\x2df67fee73\x2d2bbe\x2d5ce9\x2d31b0\x2d8129b0eb7f47.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-netns-netns\x2df67fee73\x2d2bbe\x2d5ce9\x2d31b0\x2d8129b0eb7f47.mount has successfully entered the 'dead' state. Jul 19 12:38:27 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c-userdata-shm.mount has successfully entered the 'dead' state. Jul 19 12:38:27 managed-node2 systemd[1]: var-lib-containers-storage-overlay-6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e-merged.mount has successfully entered the 'dead' state. Jul 19 12:38:27 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:27-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c)" Jul 19 12:38:27 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:27-04:00" level=debug msg="Shutting down engines" Jul 19 12:38:27 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 19 12:38:27 managed-node2 systemd[1]: Stopping libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope. -- Subject: Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has begun shutting down. Jul 19 12:38:27 managed-node2 systemd[1]: libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has successfully entered the 'dead' state. Jul 19 12:38:27 managed-node2 systemd[1]: Stopped libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope. 
-- Subject: Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished shutting down. Jul 19 12:38:27 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice. -- Subject: Unit machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice has finished shutting down. Jul 19 12:38:27 managed-node2 systemd[1]: machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice: Consumed 198ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice completed and consumed the indicated resources. Jul 19 12:38:27 managed-node2 podman[29570]: Pods stopped: Jul 19 12:38:27 managed-node2 podman[29570]: f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816 Jul 19 12:38:27 managed-node2 podman[29570]: Pods removed: Jul 19 12:38:27 managed-node2 podman[29570]: f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816 Jul 19 12:38:27 managed-node2 podman[29570]: Secrets removed: Jul 19 12:38:27 managed-node2 podman[29570]: Volumes removed: Jul 19 12:38:27 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice. -- Subject: Unit machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice has finished starting up. -- -- The start-up result is done. Jul 19 12:38:27 managed-node2 systemd[1]: Started libcontainer container 4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a. -- Subject: Unit libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vetha38befe0: link is not ready Jul 19 12:38:27 managed-node2 systemd-udevd[29728]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. 
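The "Pods stopped / Pods removed" lines show the podman-kube@ service tearing down the ad-hoc pod (f8000a88fe4a...) and creating its own replacement (63cd36510fe7...), so the unit fully owns the workload from here on. A quick way to see what the service left running, sketched as a task:

- name: Check which pods the service left running (sketch)
  ansible.builtin.command:
    cmd: podman pod ps
  changed_when: false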
Jul 19 12:38:27 managed-node2 systemd-udevd[29728]: Could not generate persistent MAC address for vetha38befe0: No such file or directory Jul 19 12:38:27 managed-node2 NetworkManager[661]: [1752943107.3392] manager: (vetha38befe0): new Veth device (/org/freedesktop/NetworkManager/Devices/5) Jul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered blocking state Jul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered disabled state Jul 19 12:38:27 managed-node2 kernel: device vetha38befe0 entered promiscuous mode Jul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered blocking state Jul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered forwarding state Jul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered disabled state Jul 19 12:38:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vetha38befe0: link becomes ready Jul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered blocking state Jul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered forwarding state Jul 19 12:38:27 managed-node2 NetworkManager[661]: [1752943107.3666] device (vetha38befe0): carrier: link connected Jul 19 12:38:27 managed-node2 NetworkManager[661]: [1752943107.3670] device (cni-podman1): carrier: link connected Jul 19 12:38:27 managed-node2 dnsmasq[29798]: listening on cni-podman1(#3): 10.89.0.1 Jul 19 12:38:27 managed-node2 dnsmasq[29798]: listening on cni-podman1(#3): fe80::ec2d:d5ff:fe97:9a6b%cni-podman1 Jul 19 12:38:27 managed-node2 dnsmasq[29802]: started, version 2.79 cachesize 150 Jul 19 12:38:27 managed-node2 dnsmasq[29802]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify Jul 19 12:38:27 managed-node2 dnsmasq[29802]: using local addresses only for domain dns.podman Jul 19 12:38:27 managed-node2 dnsmasq[29802]: reading /etc/resolv.conf Jul 19 12:38:27 managed-node2 dnsmasq[29802]: using local addresses only for domain dns.podman Jul 19 12:38:27 managed-node2 dnsmasq[29802]: using nameserver 10.29.169.13#53 Jul 19 12:38:27 managed-node2 dnsmasq[29802]: using nameserver 10.29.170.12#53 Jul 19 12:38:27 managed-node2 dnsmasq[29802]: using nameserver 10.2.32.1#53 Jul 19 12:38:27 managed-node2 dnsmasq[29802]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Jul 19 12:38:27 managed-node2 systemd[1]: Started libcontainer container 64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3. -- Subject: Unit libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:27 managed-node2 systemd[1]: Started libcontainer container d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425. -- Subject: Unit libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope has finished starting up. -- -- The start-up result is done. 
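The dnsmasq instance here is the CNI dnsname plugin for podman-default-kube-network: it listens on the bridge address 10.89.0.1, forwards to the host's resolvers, and publishes container names from an addnhosts file ("read ... addnhosts - 1 addresses"). The registered names can be inspected directly; the path below is taken verbatim from the log:

- name: Show the names dnsname has registered on the kube network (sketch)
  ansible.builtin.command:
    cmd: cat /run/containers/cni/dnsname/podman-default-kube-network/addnhosts
  changed_when: false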
Jul 19 12:38:27 managed-node2 podman[29570]: Pod: Jul 19 12:38:27 managed-node2 podman[29570]: 63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7 Jul 19 12:38:27 managed-node2 podman[29570]: Container: Jul 19 12:38:27 managed-node2 podman[29570]: d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425 Jul 19 12:38:27 managed-node2 systemd[1]: Started A template for running K8s workloads via podman-kube-play. -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished starting up. -- -- The start-up result is done. Jul 19 12:38:28 managed-node2 platform-python[29967]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:38:29 managed-node2 platform-python[30100]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:30 managed-node2 platform-python[30224]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:38:31 managed-node2 platform-python[30347]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:38:32 managed-node2 platform-python[30636]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:38:32 managed-node2 platform-python[30759]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:38:33 managed-node2 platform-python[30882]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 19 12:38:33 managed-node2 platform-python[30981]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 
src=/root/.ansible/tmp/ansible-tmp-1752943113.1342852-16787-124035634200669/source _original_basename=tmpprx4cnlk follow=False checksum=6f620a32a353317135005413ecc9cbab44a8759d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Jul 19 12:38:34 managed-node2 platform-python[31106]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:38:34 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice. -- Subject: Unit machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice has finished starting up. -- -- The start-up result is done. Jul 19 12:38:34 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vethca04a2e2: link is not ready Jul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered blocking state Jul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered disabled state Jul 19 12:38:34 managed-node2 kernel: device vethca04a2e2 entered promiscuous mode Jul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered blocking state Jul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered forwarding state Jul 19 12:38:34 managed-node2 NetworkManager[661]: [1752943114.3363] manager: (vethca04a2e2): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Jul 19 12:38:34 managed-node2 systemd-udevd[31156]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. Jul 19 12:38:34 managed-node2 systemd-udevd[31156]: Could not generate persistent MAC address for vethca04a2e2: No such file or directory Jul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered disabled state Jul 19 12:38:34 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready Jul 19 12:38:34 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready Jul 19 12:38:34 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethca04a2e2: link becomes ready Jul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered blocking state Jul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered forwarding state Jul 19 12:38:34 managed-node2 NetworkManager[661]: [1752943114.3654] device (vethca04a2e2): carrier: link connected Jul 19 12:38:34 managed-node2 dnsmasq[29802]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 2 addresses Jul 19 12:38:34 managed-node2 systemd[1]: Started libpod-conmon-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope. 
-- Subject: Unit libpod-conmon-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-conmon-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:34 managed-node2 systemd[1]: Started libcontainer container 9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91. -- Subject: Unit libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:34 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jul 19 12:38:34 managed-node2 systemd[1]: Started libpod-conmon-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope. -- Subject: Unit libpod-conmon-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-conmon-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:34 managed-node2 systemd[1]: Started libcontainer container 798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943. -- Subject: Unit libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:35 managed-node2 platform-python[31387]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Jul 19 12:38:35 managed-node2 systemd[1]: Reloading. Jul 19 12:38:35 managed-node2 platform-python[31548]: ansible-systemd Invoked with name= scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None Jul 19 12:38:35 managed-node2 systemd[1]: Reloading. Jul 19 12:38:36 managed-node2 platform-python[31703]: ansible-systemd Invoked with name= scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None Jul 19 12:38:36 managed-node2 systemd[1]: Starting A template for running K8s workloads via podman-kube-play... -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun starting up. Jul 19 12:38:36 managed-node2 systemd[1]: libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has successfully entered the 'dead' state. Jul 19 12:38:36 managed-node2 systemd[1]: libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope: Consumed 34ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope completed and consumed the indicated resources. Jul 19 12:38:36 managed-node2 systemd[1]: libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has successfully entered the 'dead' state. Jul 19 12:38:36 managed-node2 systemd[1]: libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope: Consumed 36ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope completed and consumed the indicated resources. Jul 19 12:38:36 managed-node2 dnsmasq[29802]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Jul 19 12:38:36 managed-node2 systemd[1]: var-lib-containers-storage-overlay-0e27a2de6423f234f5c5cc21592f99c374ae3f65ee2ffe512e2ea9260072c30b-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-0e27a2de6423f234f5c5cc21592f99c374ae3f65ee2ffe512e2ea9260072c30b-merged.mount has successfully entered the 'dead' state. Jul 19 12:38:36 managed-node2 systemd[1]: libpod-conmon-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-conmon-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has successfully entered the 'dead' state. Jul 19 12:38:36 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered disabled state Jul 19 12:38:36 managed-node2 kernel: device vethca04a2e2 left promiscuous mode Jul 19 12:38:36 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered disabled state Jul 19 12:38:36 managed-node2 systemd[1]: run-netns-netns\x2dbc35cf78\x2d8b29\x2d812d\x2d8688\x2d6b3a472533c6.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-netns-netns\x2dbc35cf78\x2d8b29\x2d812d\x2d8688\x2d6b3a472533c6.mount has successfully entered the 'dead' state. Jul 19 12:38:36 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91-userdata-shm.mount has successfully entered the 'dead' state. 
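The unit name podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service seen in this run is a systemd template instance: the kube file path is encoded with systemd-escape rules ("/" becomes "-", a literal "-" becomes "\x2d"), so each kube file gets its own instance of podman-kube@.service. The enable/start module calls logged above elide their name= value in the journal, but the instance name can be read off the Subject lines; a rough sketch of the corresponding task (here for httpd3, combining the separate enable and start calls into one):

    - name: Enable and start the kube-play template instance for httpd3 (sketch)
      ansible.builtin.systemd:
        name: "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service"
        scope: system
        enabled: true
        state: started
        daemon_reload: true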
Jul 19 12:38:36 managed-node2 systemd[1]: var-lib-containers-storage-overlay-50218d54ec26584adc0c1ba212de5d0b7c4329564918e9762d222c23ddef0ca1-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-50218d54ec26584adc0c1ba212de5d0b7c4329564918e9762d222c23ddef0ca1-merged.mount has successfully entered the 'dead' state.
Jul 19 12:38:36 managed-node2 systemd[1]: libpod-conmon-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-conmon-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has successfully entered the 'dead' state.
Jul 19 12:38:36 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice.
-- Subject: Unit machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice has finished shutting down.
Jul 19 12:38:36 managed-node2 systemd[1]: machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice: Consumed 192ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice completed and consumed the indicated resources.
Jul 19 12:38:36 managed-node2 podman[31710]: Pods stopped:
Jul 19 12:38:36 managed-node2 podman[31710]: 7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a
Jul 19 12:38:36 managed-node2 podman[31710]: Pods removed:
Jul 19 12:38:36 managed-node2 podman[31710]: 7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a
Jul 19 12:38:36 managed-node2 podman[31710]: Secrets removed:
Jul 19 12:38:36 managed-node2 podman[31710]: Volumes removed:
Jul 19 12:38:37 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice.
-- Subject: Unit machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice has finished starting up.
--
-- The start-up result is done.
Jul 19 12:38:37 managed-node2 systemd[1]: Started libcontainer container c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.
-- Subject: Unit libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope has finished starting up.
--
-- The start-up result is done.
Jul 19 12:38:37 managed-node2 NetworkManager[661]: [1752943117.2271] manager: (vetha6d4d23e): new Veth device (/org/freedesktop/NetworkManager/Devices/7)
Jul 19 12:38:37 managed-node2 systemd-udevd[31876]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
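The "Pods stopped / Pods removed" summary above appears to be podman kube play replacing a pod of the same name: the service start tears down the pod that the earlier ad-hoc play created (7868...) and builds a fresh one (78a84...). The manual equivalent is a single replace run; a sketch, assuming root and the httpd3 kube file shown elsewhere in this log:

    - name: Re-play a kube file, replacing any existing pod of the same name (sketch)
      ansible.builtin.command: >-
        podman kube play --replace
        /etc/containers/ansible-kubernetes.d/httpd3.yml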
Jul 19 12:38:37 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vetha6d4d23e: link is not ready Jul 19 12:38:37 managed-node2 systemd-udevd[31876]: Could not generate persistent MAC address for vetha6d4d23e: No such file or directory Jul 19 12:38:37 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered blocking state Jul 19 12:38:37 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered disabled state Jul 19 12:38:37 managed-node2 kernel: device vetha6d4d23e entered promiscuous mode Jul 19 12:38:37 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered blocking state Jul 19 12:38:37 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered forwarding state Jul 19 12:38:37 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vetha6d4d23e: link becomes ready Jul 19 12:38:37 managed-node2 NetworkManager[661]: [1752943117.2430] device (vetha6d4d23e): carrier: link connected Jul 19 12:38:37 managed-node2 dnsmasq[29802]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 2 addresses Jul 19 12:38:37 managed-node2 systemd[1]: Started libcontainer container 8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6. -- Subject: Unit libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:37 managed-node2 systemd[1]: Started libcontainer container bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c. -- Subject: Unit libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:37 managed-node2 podman[31710]: Pod: Jul 19 12:38:37 managed-node2 podman[31710]: 78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf Jul 19 12:38:37 managed-node2 podman[31710]: Container: Jul 19 12:38:37 managed-node2 podman[31710]: bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c Jul 19 12:38:37 managed-node2 systemd[1]: Started A template for running K8s workloads via podman-kube-play. -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished starting up. -- -- The start-up result is done. Jul 19 12:38:38 managed-node2 sudo[32108]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dlblsunulyjazdrjwbxpqfeiydnfqnsr ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943118.083963-17014-206060369573326/AnsiballZ_command.py' Jul 19 12:38:38 managed-node2 sudo[32108]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:38:38 managed-node2 platform-python[32111]: ansible-command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:38 managed-node2 systemd[25528]: Started podman-32120.scope. 
-- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:38:38 managed-node2 sudo[32108]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:38:38 managed-node2 platform-python[32258]: ansible-command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:39 managed-node2 platform-python[32389]: ansible-command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:39 managed-node2 sudo[32520]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-eeofqvmjjevxeutcqzjhxoeuxycorapq ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943119.4551659-17079-9632647473971/AnsiballZ_command.py' Jul 19 12:38:39 managed-node2 sudo[32520]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:38:39 managed-node2 platform-python[32523]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:39 managed-node2 sudo[32520]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:38:40 managed-node2 platform-python[32649]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:40 managed-node2 platform-python[32775]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:41 managed-node2 platform-python[32901]: ansible-uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:38:41 managed-node2 platform-python[33025]: ansible-uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None 
delimiter=None directory_mode=None Jul 19 12:38:41 managed-node2 rsyslogd[1019]: imjournal: journal files changed, reloading... [v8.2102.0-15.el8 try https://www.rsyslog.com/e/0 ] Jul 19 12:38:41 managed-node2 platform-python[33150]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr_s72m6vdk_podman/httpd1-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:42 managed-node2 platform-python[33274]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr_s72m6vdk_podman/httpd2-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:42 managed-node2 platform-python[33398]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr_s72m6vdk_podman/httpd3-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:45 managed-node2 platform-python[33647]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:46 managed-node2 platform-python[33775]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:38:49 managed-node2 platform-python[33900]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:38:52 managed-node2 platform-python[34023]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 19 12:38:52 managed-node2 platform-python[34150]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 19 12:38:53 managed-node2 platform-python[34277]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 19 12:38:55 managed-node2 platform-python[34400]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 
conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:38:58 managed-node2 platform-python[34523]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:39:01 managed-node2 platform-python[34646]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:39:03 managed-node2 platform-python[34769]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 19 12:39:05 managed-node2 platform-python[34930]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 19 12:39:06 managed-node2 platform-python[35053]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 19 12:39:10 managed-node2 platform-python[35176]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jul 19 12:39:11 managed-node2 platform-python[35300]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:11 managed-node2 platform-python[35425]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:12 managed-node2 platform-python[35549]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:13 managed-node2 platform-python[35673]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:14 managed-node2 platform-python[35797]: ansible-command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jul 19 12:39:14 managed-node2 platform-python[35920]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None 
seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:15 managed-node2 platform-python[36043]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:15 managed-node2 sudo[36166]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wbomjbspvcxjkqucdsequagxlcclxnab ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943155.3657928-18727-255969634455656/AnsiballZ_podman_image.py' Jul 19 12:39:15 managed-node2 sudo[36166]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:39:15 managed-node2 systemd[25528]: Started podman-36171.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:39:15 managed-node2 systemd[25528]: Started podman-36179.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:39:15 managed-node2 systemd[25528]: Started podman-36187.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:39:15 managed-node2 systemd[25528]: Started podman-36195.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:39:15 managed-node2 systemd[25528]: Started podman-36203.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:39:16 managed-node2 systemd[25528]: Started podman-36211.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
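Earlier in this run (the loginctl entry above), lingering is enabled for podman_basic_user so that the user's systemd instance, and with it the rootless pods, can outlive login sessions; the creates= guard makes the step idempotent because loginctl drops a flag file under /var/lib/systemd/linger. The logged call corresponds to a task like:

    - name: Let podman_basic_user services run without an open session (mirrors the logged call)
      ansible.builtin.command:
        cmd: loginctl enable-linger podman_basic_user
        creates: /var/lib/systemd/linger/podman_basic_user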
Jul 19 12:39:16 managed-node2 sudo[36166]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:39:16 managed-node2 platform-python[36340]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:17 managed-node2 platform-python[36465]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:17 managed-node2 platform-python[36588]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 19 12:39:17 managed-node2 platform-python[36652]: ansible-file Invoked with owner=podman_basic_user group=3001 mode=0644 dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _original_basename=tmpvcvwgcl1 recurse=False state=file path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:18 managed-node2 sudo[36775]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mwgfktigaxpvesxpytdoaduxtkxpnkwn ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943158.1234493-18828-35756619876473/AnsiballZ_podman_play.py' Jul 19 12:39:18 managed-node2 sudo[36775]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:39:18 managed-node2 platform-python[36778]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:39:18 managed-node2 systemd[25528]: Started podman-36786.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
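The podman_play invocation above corresponds to a task roughly like the sketch below, run as podman_basic_user with XDG_RUNTIME_DIR set (the sudo wrapper visible in the journal). With debug=True and log_level=debug the module also logs the exact podman command line, its stdout, stderr, and return code, which is what the PODMAN-PLAY-KUBE entries that follow show:

    - name: Start the httpd1 pod from its kube file (sketch of the logged module call)
      containers.podman.podman_play:
        kube_file: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
        state: started
        debug: true
        log_level: debug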
Jul 19 12:39:18 managed-node2 platform-python[36778]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
Jul 19 12:39:18 managed-node2 platform-python[36778]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout:
Jul 19 12:39:18 managed-node2 platform-python[36778]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-19T12:39:18-04:00" level=info msg="/bin/podman filtering at log level debug"
time="2025-07-19T12:39:18-04:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)"
time="2025-07-19T12:39:18-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\""
time="2025-07-19T12:39:18-04:00" level=info msg="Using sqlite as database backend"
time="2025-07-19T12:39:18-04:00" level=debug msg="Using graph driver overlay"
time="2025-07-19T12:39:18-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage"
time="2025-07-19T12:39:18-04:00" level=debug msg="Using run root /run/user/3001/containers"
time="2025-07-19T12:39:18-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod"
time="2025-07-19T12:39:18-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp"
time="2025-07-19T12:39:18-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes"
time="2025-07-19T12:39:18-04:00" level=debug msg="Using transient store: false"
time="2025-07-19T12:39:18-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\""
time="2025-07-19T12:39:18-04:00" level=debug msg="Cached value indicated that overlay is supported"
time="2025-07-19T12:39:18-04:00" level=debug msg="Cached value indicated that overlay is supported"
time="2025-07-19T12:39:18-04:00" level=debug msg="Cached value indicated that metacopy is not being used"
time="2025-07-19T12:39:18-04:00" level=debug msg="Cached value indicated that native-diff is usable"
time="2025-07-19T12:39:18-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false"
time="2025-07-19T12:39:18-04:00" level=debug msg="Initializing event backend file"
time="2025-07-19T12:39:18-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument"
time="2025-07-19T12:39:18-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument"
time="2025-07-19T12:39:18-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument"
time="2025-07-19T12:39:18-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument"
time="2025-07-19T12:39:18-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument"
time="2025-07-19T12:39:18-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument"
time="2025-07-19T12:39:18-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument"
time="2025-07-19T12:39:18-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\""
time="2025-07-19T12:39:18-04:00" level=info msg="Setting parallel job count to 7"
time="2025-07-19T12:39:18-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-19 12:38:13.521272 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}"
time="2025-07-19T12:39:18-04:00" level=debug msg="Successfully loaded 2 networks"
time="2025-07-19T12:39:18-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage"
time="2025-07-19T12:39:18-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }"
time="2025-07-19T12:39:18-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..."
time="2025-07-19T12:39:18-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\""
time="2025-07-19T12:39:18-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage"
time="2025-07-19T12:39:18-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63)"
time="2025-07-19T12:39:18-04:00" level=debug msg="exporting opaque data as blob \"sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\""
time="2025-07-19T12:39:18-04:00" level=debug msg="Pod using bridge network mode"
time="2025-07-19T12:39:18-04:00" level=debug msg="Got pod cgroup as /libpod_parent/1846ecb89dbdb057faef33ff14bed3ee782f5fffa65b2fd38248f39e0fe82c96"
Error: adding pod to state: name "httpd1" is in use: pod already exists
time="2025-07-19T12:39:18-04:00" level=debug msg="Shutting down engines"
Jul 19 12:39:18 managed-node2 platform-python[36778]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125
Jul 19 12:39:18 managed-node2 sudo[36775]: pam_unix(sudo:session): session closed for user podman_basic_user
Jul 19 12:39:19 managed-node2 platform-python[36940]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 19 12:39:20 managed-node2 platform-python[37064]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 19 12:39:21 managed-node2 platform-python[37189]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 19 12:39:22 managed-node2 platform-python[37313]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 19 12:39:22 managed-node2 platform-python[37436]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 19 12:39:23 managed-node2 platform-python[37727]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 19 12:39:24 managed-node2 platform-python[37852]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 19 12:39:24 managed-node2 platform-python[37975]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Jul 19 12:39:24 managed-node2 platform-python[38039]: ansible-file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd2.yml _original_basename=tmpoct5ap5y recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd2.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 19 12:39:25 managed-node2 platform-python[38162]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Jul 19 12:39:25 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice.
-- Subject: Unit machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice has finished starting up.
--
-- The start-up result is done.
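The rc 125 above is podman's generic CLI error status; the root cause is the Error: line in the stderr dump: adding pod to state: name "httpd1" is in use: pod already exists. podman play kube without --replace refuses to overwrite a live pod, so re-running the same play fails, and the httpd2 run below fails the same way. The module's logged parameters include recreate=None; setting it is one hedged way to make the step idempotent, assuming the collection's recreate option behaves as documented:

    - name: Tear down and re-create the pod when the name is already in use (sketch)
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/httpd2.yml
        state: started
        recreate: true   # option visible in the logged parameter list; behavior assumed from docs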
Jul 19 12:39:25 managed-node2 platform-python[38162]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml
Jul 19 12:39:25 managed-node2 platform-python[38162]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout:
Jul 19 12:39:25 managed-node2 platform-python[38162]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-19T12:39:25-04:00" level=info msg="/usr/bin/podman filtering at log level debug"
time="2025-07-19T12:39:25-04:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)"
time="2025-07-19T12:39:25-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\""
time="2025-07-19T12:39:25-04:00" level=info msg="Using sqlite as database backend"
time="2025-07-19T12:39:25-04:00" level=debug msg="Using graph driver overlay"
time="2025-07-19T12:39:25-04:00" level=debug msg="Using graph root /var/lib/containers/storage"
time="2025-07-19T12:39:25-04:00" level=debug msg="Using run root /run/containers/storage"
time="2025-07-19T12:39:25-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod"
time="2025-07-19T12:39:25-04:00" level=debug msg="Using tmp dir /run/libpod"
time="2025-07-19T12:39:25-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes"
time="2025-07-19T12:39:25-04:00" level=debug msg="Using transient store: false"
time="2025-07-19T12:39:25-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\""
time="2025-07-19T12:39:25-04:00" level=debug msg="Cached value indicated that overlay is supported"
time="2025-07-19T12:39:25-04:00" level=debug msg="Cached value indicated that overlay is supported"
time="2025-07-19T12:39:25-04:00" level=debug msg="Cached value indicated that metacopy is being used"
time="2025-07-19T12:39:25-04:00" level=debug msg="Cached value indicated that native-diff is not being used"
time="2025-07-19T12:39:25-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled"
time="2025-07-19T12:39:25-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true"
time="2025-07-19T12:39:25-04:00" level=debug msg="Initializing event backend file"
time="2025-07-19T12:39:25-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument"
time="2025-07-19T12:39:25-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument"
time="2025-07-19T12:39:25-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument"
time="2025-07-19T12:39:25-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument"
time="2025-07-19T12:39:25-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument"
time="2025-07-19T12:39:25-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument"
time="2025-07-19T12:39:25-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument"
time="2025-07-19T12:39:25-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\""
time="2025-07-19T12:39:25-04:00" level=info msg="Setting parallel job count to 7"
time="2025-07-19T12:39:25-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-19 12:35:55.640649556 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}"
time="2025-07-19T12:39:25-04:00" level=debug msg="Successfully loaded 2 networks"
time="2025-07-19T12:39:25-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage"
time="2025-07-19T12:39:25-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }"
time="2025-07-19T12:39:25-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..."
time="2025-07-19T12:39:25-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\""
time="2025-07-19T12:39:25-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage"
time="2025-07-19T12:39:25-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a)"
time="2025-07-19T12:39:25-04:00" level=debug msg="exporting opaque data as blob \"sha256:fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\""
time="2025-07-19T12:39:25-04:00" level=debug msg="Pod using bridge network mode"
time="2025-07-19T12:39:25-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice for parent machine.slice and name libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19"
time="2025-07-19T12:39:25-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice"
time="2025-07-19T12:39:25-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice"
Error: adding pod to state: name "httpd2" is in use: pod already exists
time="2025-07-19T12:39:25-04:00" level=debug msg="Shutting down engines"
Jul 19 12:39:25 managed-node2 platform-python[38162]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125
Jul 19 12:39:26 managed-node2 platform-python[38323]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 19 12:39:27 managed-node2 platform-python[38448]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 19 12:39:28 managed-node2 platform-python[38572]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 19 12:39:29 managed-node2 platform-python[38695]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 19 12:39:30 managed-node2 platform-python[38984]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 19 12:39:31 managed-node2 platform-python[39109]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 19 12:39:31 managed-node2 platform-python[39232]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Jul 19 12:39:31 managed-node2 platform-python[39296]: ansible-file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd3.yml _original_basename=tmp8akhw61j recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd3.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 19 12:39:32 managed-node2 platform-python[39419]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Jul 19 12:39:32 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_cb3b0a5bd9971330489767c2081b22911aea01140e5cd90be87bd649d4122b98.slice.
-- Subject: Unit machine-libpod_pod_cb3b0a5bd9971330489767c2081b22911aea01140e5cd90be87bd649d4122b98.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_cb3b0a5bd9971330489767c2081b22911aea01140e5cd90be87bd649d4122b98.slice has finished starting up. -- -- The start-up result is done. Jul 19 12:39:33 managed-node2 sudo[39581]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hrgfwbfwhnrhozyrljgrwuftlafbcvgj ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943172.9515133-19598-262459141060618/AnsiballZ_command.py' Jul 19 12:39:33 managed-node2 sudo[39581]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:39:33 managed-node2 platform-python[39584]: ansible-command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:33 managed-node2 systemd[25528]: Started podman-39593.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:39:33 managed-node2 sudo[39581]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:39:33 managed-node2 platform-python[39723]: ansible-command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:34 managed-node2 platform-python[39854]: ansible-command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:34 managed-node2 sudo[39985]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kapxhijpsmqpbluyhztflsgdqehfqzsf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943174.2658803-19637-151873258581255/AnsiballZ_command.py' Jul 19 12:39:34 managed-node2 sudo[39985]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:39:34 managed-node2 platform-python[39988]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:34 managed-node2 sudo[39985]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:39:34 managed-node2 platform-python[40114]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:35 managed-node2 platform-python[40240]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:35 managed-node2 platform-python[40366]: ansible-uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET 
follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:36 managed-node2 platform-python[40490]: ansible-uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:36 managed-node2 platform-python[40614]: ansible-uri Invoked with url=http://localhost:15003/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:39 managed-node2 platform-python[40863]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:40 managed-node2 platform-python[40992]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:43 managed-node2 platform-python[41117]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jul 19 12:39:44 managed-node2 platform-python[41241]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:44 managed-node2 platform-python[41366]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:44 managed-node2 platform-python[41490]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:45 managed-node2 platform-python[41614]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:46 managed-node2 
platform-python[41738]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:46 managed-node2 sudo[41863]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bsbqhhxxcupnkafozyypbjqjdnfplilf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943186.6742442-20261-89535707061551/AnsiballZ_systemd.py' Jul 19 12:39:46 managed-node2 sudo[41863]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:39:47 managed-node2 platform-python[41866]: ansible-systemd Invoked with name= scope=user state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 19 12:39:47 managed-node2 systemd[25528]: Reloading. Jul 19 12:39:47 managed-node2 systemd[25528]: Stopping A template for running K8s workloads via podman-kube-play... -- Subject: Unit UNIT has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun shutting down. Jul 19 12:39:47 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered disabled state Jul 19 12:39:47 managed-node2 kernel: device veth24653eaf left promiscuous mode Jul 19 12:39:47 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered disabled state Jul 19 12:39:47 managed-node2 podman[41882]: Pods stopped: Jul 19 12:39:47 managed-node2 podman[41882]: 2d1d21944d4ca561d0a237f029182068a925bb49513d0d920c7ad527b530a87a Jul 19 12:39:47 managed-node2 podman[41882]: Pods removed: Jul 19 12:39:47 managed-node2 podman[41882]: 2d1d21944d4ca561d0a237f029182068a925bb49513d0d920c7ad527b530a87a Jul 19 12:39:47 managed-node2 podman[41882]: Secrets removed: Jul 19 12:39:47 managed-node2 podman[41882]: Volumes removed: Jul 19 12:39:47 managed-node2 systemd[25528]: Stopped A template for running K8s workloads via podman-kube-play. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. 
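
The three ansible-uri entries above are the test's health checks: each pod publishes a port (15001-15003) and must serve index.txt with HTTP 200. A minimal sketch of one such check, using only the parameters visible in the logged invocation:

    - name: Verify the httpd1 pod serves its test page
      uri:
        url: http://localhost:15001/index.txt
        return_content: true
        status_code: 200
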
Jul 19 12:39:47 managed-node2 sudo[41863]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:39:48 managed-node2 platform-python[42156]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:48 managed-node2 sudo[42281]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rkhrtubvxxvbakhpbdfuonkadzawohqm ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943188.1900835-20342-278852314562176/AnsiballZ_podman_play.py' Jul 19 12:39:48 managed-node2 sudo[42281]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jul 19 12:39:48 managed-node2 systemd[25528]: Started podman-42292.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
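
The sudo entry above shows the wrapper used for every rootless task: become podman_basic_user with XDG_RUNTIME_DIR pointed at /run/user/3001 so the user's podman and systemd instance can be reached. The podman_play invocation it wraps tears down httpd1 from its kube YAML; a sketch matching the logged parameters (the PODMAN-PLAY-KUBE entries that follow show the CLI it ran and the result):

    - name: Tear down the rootless httpd1 kube workload
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001
      containers.podman.podman_play:
        kube_file: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
        state: absent
        executable: podman
        debug: true
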
Jul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman kube play --down /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped: Pods removed: Secrets removed: Volumes removed: Jul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: Jul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jul 19 12:39:48 managed-node2 sudo[42281]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:39:48 managed-node2 platform-python[42421]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:50 managed-node2 platform-python[42544]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 19 12:39:50 managed-node2 platform-python[42668]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:51 managed-node2 platform-python[42793]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:52 managed-node2 platform-python[42917]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 19 12:39:52 managed-node2 systemd[1]: Reloading. Jul 19 12:39:52 managed-node2 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play... -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun shutting down. Jul 19 12:39:52 managed-node2 systemd[1]: libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope has successfully entered the 'dead' state. Jul 19 12:39:52 managed-node2 systemd[1]: libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope: Consumed 33ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope completed and consumed the indicated resources. Jul 19 12:39:52 managed-node2 systemd[1]: libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope has successfully entered the 'dead' state. Jul 19 12:39:52 managed-node2 systemd[1]: libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope: Consumed 33ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope completed and consumed the indicated resources. Jul 19 12:39:52 managed-node2 dnsmasq[29802]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Jul 19 12:39:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay-1ac1165dcb590ce00bffba4600c63f5cfb3b70afb8f380b4edeace6635fcdfe3-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-1ac1165dcb590ce00bffba4600c63f5cfb3b70afb8f380b4edeace6635fcdfe3-merged.mount has successfully entered the 'dead' state. Jul 19 12:39:52 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered disabled state Jul 19 12:39:52 managed-node2 kernel: device vetha38befe0 left promiscuous mode Jul 19 12:39:52 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered disabled state Jul 19 12:39:52 managed-node2 systemd[1]: run-netns-netns\x2d85ef15c4\x2d2df7\x2d918e\x2d907f\x2dc88b265faa98.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-netns-netns\x2d85ef15c4\x2d2df7\x2d918e\x2d907f\x2dc88b265faa98.mount has successfully entered the 'dead' state. Jul 19 12:39:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3-userdata-shm.mount has successfully entered the 'dead' state. Jul 19 12:39:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay-38daa2f903ec0433792b188cb05d307d74de74874667479598255b129c8e533b-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-38daa2f903ec0433792b188cb05d307d74de74874667479598255b129c8e533b-merged.mount has successfully entered the 'dead' state. Jul 19 12:39:52 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice. -- Subject: Unit machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice has finished shutting down. 
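
The PODMAN-PLAY-KUBE entries in this journal record the exact CLI the module drives (podman kube play --down <file>), so a teardown can also be reproduced by hand for debugging; a hedged equivalent as a raw command task, using the httpd2 path from the entries below:

    - name: Tear down a kube workload with the same CLI the module logs
      command: podman kube play --down /etc/containers/ansible-kubernetes.d/httpd2.yml
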
Jul 19 12:39:52 managed-node2 systemd[1]: machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice: Consumed 66ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice completed and consumed the indicated resources. Jul 19 12:39:52 managed-node2 podman[42953]: Pods stopped: Jul 19 12:39:52 managed-node2 podman[42953]: 63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7 Jul 19 12:39:52 managed-node2 podman[42953]: Pods removed: Jul 19 12:39:52 managed-node2 podman[42953]: 63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7 Jul 19 12:39:52 managed-node2 podman[42953]: Secrets removed: Jul 19 12:39:52 managed-node2 podman[42953]: Volumes removed: Jul 19 12:39:52 managed-node2 systemd[1]: libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope has successfully entered the 'dead' state. Jul 19 12:39:52 managed-node2 systemd[1]: libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope: Consumed 34ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope completed and consumed the indicated resources. Jul 19 12:39:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a-userdata-shm.mount has successfully entered the 'dead' state. Jul 19 12:39:53 managed-node2 dnsmasq[29802]: exiting on receipt of SIGTERM Jul 19 12:39:53 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has successfully entered the 'dead' state. Jul 19 12:39:53 managed-node2 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play. -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished shutting down. Jul 19 12:39:53 managed-node2 platform-python[43230]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay-946e50296936b22c6a0cd6493841882848a8040824e6c32355272e3fbcd82469-merged.mount: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-946e50296936b22c6a0cd6493841882848a8040824e6c32355272e3fbcd82469-merged.mount has successfully entered the 'dead' state. Jul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman kube play --down /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped: Pods removed: Secrets removed: Volumes removed: Jul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: Jul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jul 19 12:39:54 managed-node2 platform-python[43492]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:55 managed-node2 platform-python[43615]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:56 managed-node2 platform-python[43740]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:57 managed-node2 platform-python[43864]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 19 12:39:57 managed-node2 systemd[1]: Reloading. Jul 19 12:39:57 managed-node2 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play... -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun shutting down. Jul 19 12:39:57 managed-node2 systemd[1]: libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope has successfully entered the 'dead' state. Jul 19 12:39:57 managed-node2 systemd[1]: libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope: Consumed 33ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope completed and consumed the indicated resources. Jul 19 12:39:57 managed-node2 systemd[1]: libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope has successfully entered the 'dead' state. Jul 19 12:39:57 managed-node2 systemd[1]: libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope: Consumed 33ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope completed and consumed the indicated resources. Jul 19 12:39:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay-e4d65582a94e2bdc8a1eaad3e5573271c39b373e604383029d5c678d2ac244a1-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-e4d65582a94e2bdc8a1eaad3e5573271c39b373e604383029d5c678d2ac244a1-merged.mount has successfully entered the 'dead' state. Jul 19 12:39:57 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered disabled state Jul 19 12:39:57 managed-node2 kernel: device vetha6d4d23e left promiscuous mode Jul 19 12:39:57 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered disabled state Jul 19 12:39:57 managed-node2 systemd[1]: run-netns-netns\x2d4c7240ed\x2da995\x2deb80\x2d20f7\x2d420676ba3f43.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-netns-netns\x2d4c7240ed\x2da995\x2deb80\x2d20f7\x2d420676ba3f43.mount has successfully entered the 'dead' state. Jul 19 12:39:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6-userdata-shm.mount has successfully entered the 'dead' state. Jul 19 12:39:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay-9dcce2629a28829a7d47e45b786a91b8326ac6500c27209769c1539d9f082e74-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-9dcce2629a28829a7d47e45b786a91b8326ac6500c27209769c1539d9f082e74-merged.mount has successfully entered the 'dead' state. Jul 19 12:39:57 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice. 
-- Subject: Unit machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice has finished shutting down. Jul 19 12:39:57 managed-node2 systemd[1]: machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice: Consumed 67ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice completed and consumed the indicated resources. Jul 19 12:39:58 managed-node2 podman[43900]: Pods stopped: Jul 19 12:39:58 managed-node2 podman[43900]: 78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf Jul 19 12:39:58 managed-node2 podman[43900]: Pods removed: Jul 19 12:39:58 managed-node2 podman[43900]: 78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf Jul 19 12:39:58 managed-node2 podman[43900]: Secrets removed: Jul 19 12:39:58 managed-node2 podman[43900]: Volumes removed: Jul 19 12:39:58 managed-node2 systemd[1]: libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope has successfully entered the 'dead' state. Jul 19 12:39:58 managed-node2 systemd[1]: libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope: Consumed 33ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope completed and consumed the indicated resources. Jul 19 12:39:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c-userdata-shm.mount has successfully entered the 'dead' state. Jul 19 12:39:58 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has successfully entered the 'dead' state. Jul 19 12:39:58 managed-node2 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play. -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished shutting down. 
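
The system-scoped workloads run under the podman-kube@.service template, and the instance name is the systemd-escaped kube file path, which is why the journal shows \x2d wherever the path contains a dash. A sketch of stopping the httpd3 instance, with the escaped name copied verbatim from the journal (single-quoted so YAML keeps the backslash literal); the actual name= value is elided in the ansible-systemd entries above:

    - name: Stop and disable the system-scoped httpd3 kube unit
      systemd:
        # the instance part matches what 'systemd-escape' produces for
        # the absolute path /etc/containers/ansible-kubernetes.d/httpd3.yml
        name: 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service'
        scope: system
        state: stopped
        enabled: false
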
Jul 19 12:39:58 managed-node2 platform-python[44169]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay-c944fbd43e1673bcb0e2412bde6d753cffca05c01ef505aa29441df09b37e4f0-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-c944fbd43e1673bcb0e2412bde6d753cffca05c01ef505aa29441df09b37e4f0-merged.mount has successfully entered the 'dead' state. Jul 19 12:39:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 19 12:39:58 managed-node2 platform-python[44294]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:39:58 managed-node2 platform-python[44294]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/httpd3.yml Jul 19 12:39:59 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Jul 19 12:39:59 managed-node2 platform-python[44430]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:40:00 managed-node2 platform-python[44553]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None Jul 19 12:40:01 managed-node2 platform-python[44677]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:01 managed-node2 sudo[44802]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zjfnlsrhavffrrytzlezjdprwiglzjsy ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943201.3476377-20989-80415109164671/AnsiballZ_podman_container_info.py' Jul 19 12:40:01 managed-node2 sudo[44802]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:40:01 managed-node2 platform-python[44805]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None Jul 19 12:40:01 managed-node2 systemd[25528]: Started podman-44807.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:40:01 managed-node2 sudo[44802]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:40:02 managed-node2 sudo[44936]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kqzzrdenthgaimpshaehnjtpvgbskyzt ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943201.9272537-21012-43022464511697/AnsiballZ_command.py' Jul 19 12:40:02 managed-node2 sudo[44936]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:40:02 managed-node2 platform-python[44939]: ansible-command Invoked with _raw_params=podman network ls -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:02 managed-node2 systemd[25528]: Started podman-44941.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
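
A few entries further on, lingering for podman_basic_user is switched off. Note the removes= guard in the logged command invocation: the task only runs while the linger marker file still exists, which keeps the cleanup idempotent. Sketch:

    - name: Disable linger for podman_basic_user (no-op once the marker is gone)
      command: loginctl disable-linger podman_basic_user
      args:
        removes: /var/lib/systemd/linger/podman_basic_user
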
Jul 19 12:40:02 managed-node2 sudo[44936]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:40:02 managed-node2 sudo[45096]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wfpjnxfzctwnxksvtryonkmxvjvkhduq ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943202.41182-21042-156572471435781/AnsiballZ_command.py' Jul 19 12:40:02 managed-node2 sudo[45096]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:40:02 managed-node2 platform-python[45099]: ansible-command Invoked with _raw_params=podman secret ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:02 managed-node2 systemd[25528]: Started podman-45101.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:40:02 managed-node2 sudo[45096]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:40:03 managed-node2 platform-python[45230]: ansible-command Invoked with removes=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl disable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None Jul 19 12:40:03 managed-node2 systemd[1]: Stopping User Manager for UID 3001... -- Subject: Unit user@3001.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@3001.service has begun shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Stopped target Default. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Stopping D-Bus User Message Bus... -- Subject: Unit UNIT has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Removed slice podman\x2dkube.slice. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Stopping podman-pause-1458d7a0.scope. -- Subject: Unit UNIT has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Stopped D-Bus User Message Bus. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Stopped target Basic System. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Stopped target Timers. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Stopped Mark boot as successful after the user session has run 2 minutes. 
-- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Stopped target Sockets. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Stopped target Paths. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Closed D-Bus User Message Bus Socket. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Stopped podman-pause-1458d7a0.scope. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Removed slice user.slice. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Reached target Shutdown. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:40:03 managed-node2 systemd[25528]: Started Exit the Session. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:40:03 managed-node2 systemd[25528]: Reached target Exit the Session. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:40:03 managed-node2 systemd[1]: user@3001.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit user@3001.service has successfully entered the 'dead' state. Jul 19 12:40:03 managed-node2 systemd[1]: Stopped User Manager for UID 3001. -- Subject: Unit user@3001.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@3001.service has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[1]: Stopping User runtime directory /run/user/3001... -- Subject: Unit user-runtime-dir@3001.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@3001.service has begun shutting down. Jul 19 12:40:03 managed-node2 systemd[1]: run-user-3001.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-user-3001.mount has successfully entered the 'dead' state. Jul 19 12:40:03 managed-node2 systemd[1]: user-runtime-dir@3001.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit user-runtime-dir@3001.service has successfully entered the 'dead' state. Jul 19 12:40:03 managed-node2 systemd[1]: Stopped User runtime directory /run/user/3001. 
-- Subject: Unit user-runtime-dir@3001.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@3001.service has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[1]: Removed slice User Slice of UID 3001. -- Subject: Unit user-3001.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-3001.slice has finished shutting down. Jul 19 12:40:03 managed-node2 platform-python[45362]: ansible-command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:04 managed-node2 sudo[45486]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dlpvabpxjyeqjdweosdqhelprmnnraei ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943204.1405673-21146-194288526342265/AnsiballZ_command.py' Jul 19 12:40:04 managed-node2 sudo[45486]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:40:04 managed-node2 platform-python[45489]: ansible-command Invoked with _raw_params=podman pod exists httpd1 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:04 managed-node2 sudo[45486]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:40:04 managed-node2 platform-python[45619]: ansible-command Invoked with _raw_params=podman pod exists httpd2 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:05 managed-node2 platform-python[45749]: ansible-command Invoked with _raw_params=podman pod exists httpd3 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:05 managed-node2 sudo[45879]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uhnqljkrbofbppqpfvsxbbfqaxcbzbnz ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943205.3022838-21198-91872805319796/AnsiballZ_command.py' Jul 19 12:40:05 managed-node2 sudo[45879]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:40:05 managed-node2 platform-python[45882]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:05 managed-node2 sudo[45879]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:40:05 managed-node2 platform-python[46008]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:06 managed-node2 platform-python[46134]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:06 managed-node2 platform-python[46260]: ansible-stat Invoked with path=/var/lib/systemd/linger/podman_basic_user follow=False get_md5=False get_checksum=True get_mime=True 
get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:09 managed-node2 platform-python[46508]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:10 managed-node2 platform-python[46637]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 19 12:40:11 managed-node2 platform-python[46761]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:13 managed-node2 platform-python[46886]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jul 19 12:40:14 managed-node2 platform-python[47010]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:14 managed-node2 platform-python[47135]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:15 managed-node2 platform-python[47259]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:16 managed-node2 platform-python[47383]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:16 managed-node2 platform-python[47507]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:17 managed-node2 platform-python[47630]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:17 managed-node2 platform-python[47753]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:40:19 managed-node2 platform-python[47876]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 19 12:40:19 managed-node2 platform-python[48000]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:20 managed-node2 platform-python[48125]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:21 managed-node2 platform-python[48249]: ansible-systemd Invoked with name= 
scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 19 12:40:22 managed-node2 platform-python[48376]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:22 managed-node2 platform-python[48499]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:40:23 managed-node2 platform-python[48622]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:25 managed-node2 platform-python[48747]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:25 managed-node2 platform-python[48871]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 19 12:40:26 managed-node2 platform-python[48998]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:26 managed-node2 platform-python[49121]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:40:28 managed-node2 platform-python[49244]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None Jul 19 12:40:28 managed-node2 platform-python[49368]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:29 managed-node2 platform-python[49491]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:40:29 managed-node2 platform-python[49614]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S 
unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:40:32 managed-node2 platform-python[49776]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d Jul 19 12:40:33 managed-node2 platform-python[49903]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:33 managed-node2 platform-python[50026]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:36 managed-node2 platform-python[50274]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:37 managed-node2 platform-python[50403]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 19 12:40:37 managed-node2 platform-python[50527]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:41 managed-node2 platform-python[50691]: ansible-setup Invoked with gather_subset=['all'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d Jul 19 12:40:44 managed-node2 platform-python[50843]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:45 managed-node2 platform-python[50966]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:47 managed-node2 platform-python[51214]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:48 managed-node2 platform-python[51343]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 19 12:40:48 managed-node2 platform-python[51467]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:54 managed-node2 platform-python[51631]: ansible-setup Invoked with gather_subset=['all'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d Jul 19 12:40:54 managed-node2 platform-python[51783]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:55 managed-node2 platform-python[51906]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:56 managed-node2 platform-python[52030]: ansible-dnf Invoked with name=['python3-pyasn1', 
'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:40:59 managed-node2 platform-python[52158]: ansible-dnf Invoked with name=['certmonger'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:41:02 managed-node2 dbus-daemon[591]: [system] Reloaded configuration Jul 19 12:41:02 managed-node2 dbus-daemon[591]: [system] Reloaded configuration Jul 19 12:41:02 managed-node2 dbus-daemon[591]: [system] Reloaded configuration Jul 19 12:41:02 managed-node2 dbus-daemon[591]: [system] Reloaded configuration Jul 19 12:41:02 managed-node2 dbus-daemon[591]: [system] Reloaded configuration Jul 19 12:41:02 managed-node2 systemd[1]: Reloading. Jul 19 12:41:02 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. -- Subject: Unit run-r2a2b61b169e54534b6bc9888468488f1.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit run-r2a2b61b169e54534b6bc9888468488f1.service has finished starting up. -- -- The start-up result is done. Jul 19 12:41:02 managed-node2 systemd[1]: Starting man-db-cache-update.service... -- Subject: Unit man-db-cache-update.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has begun starting up. Jul 19 12:41:02 managed-node2 systemd[1]: Reloading. Jul 19 12:41:03 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit man-db-cache-update.service has successfully entered the 'dead' state. Jul 19 12:41:03 managed-node2 systemd[1]: Started man-db-cache-update.service. -- Subject: Unit man-db-cache-update.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has finished starting up. -- -- The start-up result is done. Jul 19 12:41:03 managed-node2 systemd[1]: run-r2a2b61b169e54534b6bc9888468488f1.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-r2a2b61b169e54534b6bc9888468488f1.service has successfully entered the 'dead' state. 
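
The two ansible-dnf invocations above install the python bindings and then certmonger itself; condensed into a single sketch task with the same packages and state:

    - name: Install certmonger and its python dependencies
      dnf:
        name:
          - python3-pyasn1
          - python3-cryptography
          - python3-dbus
          - certmonger
        state: present
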
Jul 19 12:41:04 managed-node2 platform-python[52790]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:41:04 managed-node2 platform-python[52913]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:41:05 managed-node2 platform-python[53036]: ansible-systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 19 12:41:05 managed-node2 systemd[1]: Reloading. Jul 19 12:41:05 managed-node2 systemd[1]: Starting dnf makecache... -- Subject: Unit dnf-makecache.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dnf-makecache.service has begun starting up. Jul 19 12:41:05 managed-node2 systemd[1]: Starting Certificate monitoring and PKI enrollment... -- Subject: Unit certmonger.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit certmonger.service has begun starting up. Jul 19 12:41:05 managed-node2 systemd[1]: Started Certificate monitoring and PKI enrollment. -- Subject: Unit certmonger.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit certmonger.service has finished starting up. -- -- The start-up result is done. Jul 19 12:41:05 managed-node2 dnf[53070]: Failed determining last makecache time. 
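The ansible-systemd invocation above (name=certmonger state=started enabled=True) is the journal-side trace of a systemd-module task, after which certmonger.service starts. A sketch of an equivalent task, with only the parameters shown in the log; the task name is illustrative:

- hosts: all
  tasks:
    - name: Ensure certmonger is enabled and running (illustrative name)
      systemd:
        name: certmonger
        state: started
        enabled: true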
Jul 19 12:41:05 managed-node2 dnf[53070]: CentOS Stream 8 - AppStream 124 kB/s | 4.4 kB 00:00 Jul 19 12:41:05 managed-node2 dnf[53070]: CentOS Stream 8 - BaseOS 100 kB/s | 3.9 kB 00:00 Jul 19 12:41:06 managed-node2 dnf[53070]: CentOS Stream 8 - Extras 83 kB/s | 2.9 kB 00:00 Jul 19 12:41:06 managed-node2 platform-python[53233]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=# # Ansible managed # # system_role:certificate booted=True provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None Jul 19 12:41:06 managed-node2 dnf[53070]: CentOS Stream 8 - Extras common packages 74 kB/s | 3.0 kB 00:00 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 dnf[53070]: CentOS Stream 8 - HighAvailability 36 kB/s | 3.9 kB 00:00 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 
certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53258]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved. Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 dnf[53070]: Beaker Client - RedHatEnterpriseLinux8 8.8 kB/s | 1.5 kB 00:00 Jul 19 12:41:06 managed-node2 dnf[53070]: Beaker harness 13 kB/s | 1.3 kB 00:00 Jul 19 12:41:06 managed-node2 dnf[53070]: Copr repo for beakerlib-libraries owned by bgon 13 kB/s | 1.8 kB 00:00 Jul 19 12:41:06 managed-node2 dnf[53070]: Extra Packages for Enterprise Linux 8 - x86_64 413 kB/s | 35 kB 00:00 Jul 19 12:41:06 managed-node2 platform-python[53383]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Jul 19 12:41:07 managed-node2 dnf[53070]: Extra Packages for Enterprise Linux 8 - x86_64 281 kB/s | 21 kB 00:00 Jul 19 12:41:07 managed-node2 platform-python[53508]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key Jul 19 12:41:07 managed-node2 dnf[53070]: Extra Packages for Enterprise Linux 8 - x86_64 81 kB/s | 25 kB 00:00 Jul 19 12:41:07 managed-node2 dnf[53070]: Copr repo for qa-tools owned by lpol 33 kB/s | 1.8 kB 00:00 Jul 19 12:41:07 managed-node2 platform-python[53632]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Jul 19 12:41:08 managed-node2 platform-python[53755]: ansible-command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:41:08 managed-node2 certmonger[53073]: 2025-07-19 12:41:08 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:08 managed-node2 platform-python[53879]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None 
remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:41:09 managed-node2 platform-python[54002]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:41:09 managed-node2 platform-python[54125]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:41:09 managed-node2 dnf[53070]: Metadata cache created. Jul 19 12:41:10 managed-node2 systemd[1]: dnf-makecache.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit dnf-makecache.service has successfully entered the 'dead' state. Jul 19 12:41:10 managed-node2 systemd[1]: Started dnf makecache. -- Subject: Unit dnf-makecache.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dnf-makecache.service has finished starting up. -- -- The start-up result is done. 
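The certificate_request invocation above (name=quadlet_demo, dns=['localhost'], ca=self-sign, directory=/etc/pki/tls) matches the public interface of the fedora.linux_system_roles.certificate role, and the getcert stop-tracking plus file state=absent entries that follow are the test tearing the certificate back down. A sketch of how a playbook would request such a certificate; the field values come from the log, while the play wrapper is an illustrative assumption:

- hosts: all
  vars:
    certificate_requests:
      - name: quadlet_demo   # written to /etc/pki/tls/certs and /etc/pki/tls/private
        dns:
          - localhost
        ca: self-sign
  roles:
    - fedora.linux_system_roles.certificate

Cleanup then mirrors the logged commands: getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt, followed by removing the .crt and .key files.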
Jul 19 12:41:10 managed-node2 platform-python[54249]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:41:10 managed-node2 platform-python[54372]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:41:13 managed-node2 platform-python[54620]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:41:14 managed-node2 platform-python[54749]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 19 12:41:14 managed-node2 platform-python[54873]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:41:16 managed-node2 platform-python[54998]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:41:17 managed-node2 platform-python[55121]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:41:17 managed-node2 platform-python[55244]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:41:18 managed-node2 platform-python[55368]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:41:21 managed-node2 platform-python[55491]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 19 12:41:21 managed-node2 platform-python[55618]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 19 12:41:22 managed-node2 platform-python[55745]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 19 12:41:23 managed-node2 platform-python[55868]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] 
interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 19 12:41:25 managed-node2 platform-python[55991]: ansible-command Invoked with _raw_params=journalctl -ex warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None TASK [Check] ******************************************************************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:148 Saturday 19 July 2025 12:41:25 -0400 (0:00:00.491) 0:00:32.088 ********* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "-a" ], "delta": "0:00:00.071215", "end": "2025-07-19 12:41:25.778053", "rc": 0, "start": "2025-07-19 12:41:25.706838" } STDOUT: CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES TASK [Check pods] ************************************************************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:152 Saturday 19 July 2025 12:41:25 -0400 (0:00:00.445) 0:00:32.534 ********* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "pod", "ps", "--ctr-ids", "--ctr-names", "--ctr-status" ], "delta": "0:00:00.034183", "end": "2025-07-19 12:41:26.156999", "failed_when_result": false, "rc": 0, "start": "2025-07-19 12:41:26.122816" } STDOUT: POD ID NAME STATUS CREATED INFRA ID IDS NAMES STATUS TASK [Check systemd] *********************************************************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:157 Saturday 19 July 2025 12:41:26 -0400 (0:00:00.411) 0:00:32.946 ********* ok: [managed-node2] => { "changed": false, "cmd": "set -euo pipefail; systemctl list-units --all | grep quadlet", "delta": "0:00:00.010698", "end": "2025-07-19 12:41:26.574359", "failed_when_result": false, "rc": 1, "start": "2025-07-19 12:41:26.563661" } MSG: non-zero return code TASK [LS] ********************************************************************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:165 Saturday 19 July 2025 12:41:26 -0400 (0:00:00.382) 0:00:33.329 ********* ok: [managed-node2] => { "changed": false, "cmd": [ "ls", "-alrtF", "/etc/systemd/system" ], "delta": "0:00:00.004014", "end": "2025-07-19 12:41:26.919317", "failed_when_result": false, "rc": 0, "start": "2025-07-19 12:41:26.915303" } STDOUT:
total 8
lrwxrwxrwx. 1 root root 9 May 11 2019 systemd-timedated.service -> /dev/null
drwxr-xr-x. 4 root root 169 May 29 2024 ../
lrwxrwxrwx. 1 root root 39 May 29 2024 syslog.service -> /usr/lib/systemd/system/rsyslog.service
drwxr-xr-x. 2 root root 32 May 29 2024 getty.target.wants/
lrwxrwxrwx. 1 root root 37 May 29 2024 ctrl-alt-del.target -> /usr/lib/systemd/system/reboot.target
lrwxrwxrwx. 1 root root 57 May 29 2024 dbus-org.freedesktop.nm-dispatcher.service -> /usr/lib/systemd/system/NetworkManager-dispatcher.service
drwxr-xr-x. 2 root root 48 May 29 2024 network-online.target.wants/
lrwxrwxrwx. 1 root root 41 May 29 2024 dbus-org.freedesktop.timedate1.service -> /usr/lib/systemd/system/timedatex.service
drwxr-xr-x. 2 root root 61 May 29 2024 timers.target.wants/
drwxr-xr-x. 2 root root 31 May 29 2024 basic.target.wants/
drwxr-xr-x. 2 root root 38 May 29 2024 dev-virtio\x2dports-org.qemu.guest_agent.0.device.wants/
lrwxrwxrwx. 1 root root 41 May 29 2024 default.target -> /usr/lib/systemd/system/multi-user.target
drwxr-xr-x. 2 root root 51 May 29 2024 sockets.target.wants/
drwxr-xr-x. 2 root root 31 May 29 2024 remote-fs.target.wants/
drwxr-xr-x. 2 root root 59 May 29 2024 sshd-keygen@.service.d/
drwxr-xr-x. 2 root root 119 May 29 2024 cloud-init.target.wants/
drwxr-xr-x. 2 root root 181 May 29 2024 sysinit.target.wants/
lrwxrwxrwx. 1 root root 41 Jul 19 12:35 dbus-org.fedoraproject.FirewallD1.service -> /usr/lib/systemd/system/firewalld.service
drwxr-xr-x. 13 root root 4096 Jul 19 12:39 ./
drwxr-xr-x. 2 root root 4096 Jul 19 12:41 multi-user.target.wants/
TASK [Cleanup] ***************************************************************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:172 Saturday 19 July 2025 12:41:26 -0400 (0:00:00.351) 0:00:33.680 ********* TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 19 July 2025 12:41:27 -0400 (0:00:00.082) 0:00:33.762 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 19 July 2025 12:41:27 -0400 (0:00:00.062) 0:00:33.825 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 19 July 2025 12:41:27 -0400 (0:00:00.054) 0:00:33.880 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 19 July 2025 12:41:27 -0400 (0:00:00.031) 0:00:33.911 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 19 July 2025 12:41:27 -0400 (0:00:00.032) 0:00:33.943 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 19 July 2025 12:41:27 -0400 (0:00:00.030) 0:00:33.974 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 19 July 2025 12:41:27 -0400 (0:00:00.034)
0:00:34.008 ********* ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 19 July 2025 12:41:27 -0400 (0:00:00.117) 0:00:34.125 ********* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 19 July 2025 12:41:28 -0400 (0:00:01.461) 0:00:35.587 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 19 July 2025 12:41:28 -0400 (0:00:00.032) 0:00:35.620 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 19 July 2025 12:41:28 -0400 (0:00:00.039) 0:00:35.659 ********* skipping: [managed-node2] => {} TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 19 July 2025 12:41:28 -0400 (0:00:00.031) 0:00:35.690 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 19 July 2025 12:41:29 -0400 (0:00:00.031) 0:00:35.721 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: 
/tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 19 July 2025 12:41:29 -0400 (0:00:00.032) 0:00:35.753 ********* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.026302", "end": "2025-07-19 12:41:29.375025", "rc": 0, "start": "2025-07-19 12:41:29.348723" } STDOUT: podman version 4.9.4-dev TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 19 July 2025 12:41:29 -0400 (0:00:00.378) 0:00:36.132 ********* ok: [managed-node2] => { "ansible_facts": { "podman_version": "4.9.4-dev" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 19 July 2025 12:41:29 -0400 (0:00:00.033) 0:00:36.166 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 19 July 2025 12:41:29 -0400 (0:00:00.038) 0:00:36.205 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 19 July 2025 12:41:29 -0400 (0:00:00.058) 0:00:36.263 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 19 July 2025 12:41:29 -0400 (0:00:00.090) 0:00:36.354 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 19 July 2025 12:41:29 -0400 (0:00:00.090) 0:00:36.444 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 19 July 2025 12:41:29 -0400 (0:00:00.039) 0:00:36.483 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 19 July 2025 12:41:29 -0400 (0:00:00.039) 0:00:36.523 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_group": 
"0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 19 July 2025 12:41:29 -0400 (0:00:00.041) 0:00:36.564 ********* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1752942923.4985383, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1752942894.3494363, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "3031672287", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 19 July 2025 12:41:30 -0400 (0:00:00.354) 0:00:36.919 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 19 July 2025 12:41:30 -0400 (0:00:00.034) 0:00:36.953 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 19 July 2025 12:41:30 -0400 (0:00:00.032) 0:00:36.986 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 19 July 2025 12:41:30 -0400 (0:00:00.033) 0:00:37.019 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 19 July 2025 12:41:30 -0400 (0:00:00.031) 0:00:37.051 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 19 July 2025 12:41:30 -0400 (0:00:00.032) 0:00:37.083 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: 
/tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 19 July 2025 12:41:30 -0400 (0:00:00.031) 0:00:37.114 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 19 July 2025 12:41:30 -0400 (0:00:00.032) 0:00:37.147 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 19 July 2025 12:41:30 -0400 (0:00:00.031) 0:00:37.178 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Saturday 19 July 2025 12:41:30 -0400 (0:00:00.079) 0:00:37.258 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 19 July 2025 12:41:30 -0400 (0:00:00.061) 0:00:37.319 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 19 July 2025 12:41:30 -0400 (0:00:00.030) 0:00:37.350 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Saturday 19 July 2025 12:41:30 -0400 (0:00:00.061) 0:00:37.411 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 19 July 2025 12:41:30 -0400 (0:00:00.063) 0:00:37.475 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 19 July 2025 12:41:30 -0400 
(0:00:00.031) 0:00:37.507 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Saturday 19 July 2025 12:41:30 -0400 (0:00:00.029) 0:00:37.537 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Saturday 19 July 2025 12:41:30 -0400 (0:00:00.064) 0:00:37.602 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Saturday 19 July 2025 12:41:30 -0400 (0:00:00.030) 0:00:37.632 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Saturday 19 July 2025 12:41:30 -0400 (0:00:00.030) 0:00:37.663 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Saturday 19 July 2025 12:41:31 -0400 (0:00:00.066) 0:00:37.729 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Saturday 19 July 2025 12:41:31 -0400 (0:00:00.031) 0:00:37.760 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Saturday 19 July 2025 12:41:31 -0400 (0:00:00.029) 0:00:37.790 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Saturday 19 July 2025 12:41:31 -0400 (0:00:00.030) 0:00:37.821 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Saturday 19 July 2025 12:41:31 -0400 (0:00:00.029) 0:00:37.851 ********* TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: 
/tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 19 July 2025 12:41:31 -0400 (0:00:00.096) 0:00:37.947 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Saturday 19 July 2025 12:41:31 -0400 (0:00:00.092) 0:00:38.040 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Saturday 19 July 2025 12:41:31 -0400 (0:00:00.039) 0:00:38.079 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Saturday 19 July 2025 12:41:31 -0400 (0:00:00.031) 0:00:38.111 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 19 July 2025 12:41:31 -0400 (0:00:00.030) 0:00:38.141 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Saturday 19 July 2025 12:41:31 -0400 (0:00:00.031) 0:00:38.172 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Run systemctl] ********************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:34 Saturday 19 July 2025 12:41:31 -0400 (0:00:00.029) 0:00:38.201 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Require installed systemd] ********** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:41 Saturday 19 July 2025 12:41:31 -0400 (0:00:00.037) 0:00:38.239 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:46 Saturday 19 July 2025 12:41:31 -0400 (0:00:00.032) 0:00:38.271 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:51 Saturday 19 July 2025 12:41:31 -0400 (0:00:00.029) 
0:00:38.301 ********* ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:63 Saturday 19 July 2025 12:41:34 -0400 (0:00:02.811) 0:00:41.113 ********* skipping: [managed-node2] => {} TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:68 Saturday 19 July 2025 12:41:34 -0400 (0:00:00.033) 0:00:41.147 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:73 Saturday 19 July 2025 12:41:34 -0400 (0:00:00.030) 0:00:41.178 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check which conflicting services are enabled] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 19 July 2025 12:41:34 -0400 (0:00:00.031) 0:00:41.209 ********* skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "item": "ufw", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:14 Saturday 19 July 2025 12:41:34 -0400 (0:00:00.040) 0:00:41.250 ********* skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'item': 'nftables', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "item": { "ansible_loop_var": "item", "changed": false, "item": "nftables", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'item': 'iptables', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "item": { "ansible_loop_var": "item", "changed": false, "item": "iptables", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'item': 'ufw', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "item": { "ansible_loop_var": "item", "changed": false, "item": "ufw", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: 
/tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:24 Saturday 19 July 2025 12:41:34 -0400 (0:00:00.053) 0:00:41.303 ********* ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "ActiveEnterTimestamp": "Sat 2025-07-19 12:35:31 EDT", "ActiveEnterTimestampMonotonic": "319052571", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "polkit.service dbus.service system.slice basic.target dbus.socket sysinit.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-07-19 12:35:30 EDT", "AssertTimestampMonotonic": "318194415", "Before": "network-pre.target multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-07-19 12:35:30 EDT", "ConditionTimestampMonotonic": "318194414", "ConfigurationDirectoryMode": "0755", "Conflicts": "iptables.service ipset.service ebtables.service ip6tables.service nftables.service shutdown.target", "ControlGroup": "/system.slice/firewalld.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "12929", "ExecMainStartTimestamp": "Sat 2025-07-19 12:35:30 EDT", "ExecMainStartTimestampMonotonic": "318201406", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", 
"IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-07-19 12:35:30 EDT", "InactiveExitTimestampMonotonic": "318201438", "InvocationID": "1a69340d6e3c45249ec2c2f742d27736", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "12929", "MemoryAccounting": "yes", "MemoryCurrent": "43204608", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sat 2025-07-19 12:35:31 EDT", "StateChangeTimestampMonotonic": "319052571", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", 
"SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogTimestamp": "Sat 2025-07-19 12:35:31 EDT", "WatchdogTimestampMonotonic": "319052568", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:30 Saturday 19 July 2025 12:41:35 -0400 (0:00:00.483) 0:00:41.787 ********* ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "ActiveEnterTimestamp": "Sat 2025-07-19 12:35:31 EDT", "ActiveEnterTimestampMonotonic": "319052571", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "polkit.service dbus.service system.slice basic.target dbus.socket sysinit.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-07-19 12:35:30 EDT", "AssertTimestampMonotonic": "318194415", "Before": "network-pre.target multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-07-19 12:35:30 EDT", "ConditionTimestampMonotonic": "318194414", "ConfigurationDirectoryMode": "0755", "Conflicts": "iptables.service ipset.service ebtables.service ip6tables.service nftables.service shutdown.target", "ControlGroup": "/system.slice/firewalld.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "12929", "ExecMainStartTimestamp": "Sat 2025-07-19 12:35:30 EDT", 
"ExecMainStartTimestampMonotonic": "318201406", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-07-19 12:35:30 EDT", "InactiveExitTimestampMonotonic": "318201438", "InvocationID": "1a69340d6e3c45249ec2c2f742d27736", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "12929", "MemoryAccounting": "yes", "MemoryCurrent": "43204608", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": 
"0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sat 2025-07-19 12:35:31 EDT", "StateChangeTimestampMonotonic": "319052571", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogTimestamp": "Sat 2025-07-19 12:35:31 EDT", "WatchdogTimestampMonotonic": "319052568", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:36 Saturday 19 July 2025 12:41:35 -0400 (0:00:00.482) 0:00:42.270 ********* ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/libexec/platform-python", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:45 Saturday 19 July 2025 12:41:35 -0400 (0:00:00.045) 0:00:42.315 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:58 Saturday 19 July 2025 12:41:35 -0400 (0:00:00.032) 0:00:42.348 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:74 Saturday 19 July 2025 12:41:35 -0400 (0:00:00.030) 0:00:42.378 ********* ok: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "8000/tcp", "state": "enabled" } } ok: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "9000/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:126 Saturday 19 July 2025 12:41:36 -0400 (0:00:01.156) 
0:00:43.535 ********* skipping: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "item": { "port": "8000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "item": { "port": "9000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:137 Saturday 19 July 2025 12:41:36 -0400 (0:00:00.050) 0:00:43.585 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:146 Saturday 19 July 2025 12:41:36 -0400 (0:00:00.035) 0:00:43.620 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:152 Saturday 19 July 2025 12:41:36 -0400 (0:00:00.032) 0:00:43.653 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:161 Saturday 19 July 2025 12:41:36 -0400 (0:00:00.032) 0:00:43.685 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:172 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.030) 0:00:43.716 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:178 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.029) 0:00:43.745 ********* skipping: [managed-node2] => {} TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.030) 0:00:43.776 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.029) 0:00:43.805 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Saturday 19 July 2025 
12:41:37 -0400 (0:00:00.029) 0:00:43.835 ********* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.029) 0:00:43.864 ********* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.028) 0:00:43.893 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.128) 0:00:44.021 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.033) 0:00:44.055 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.058) 0:00:44.113 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.036) 0:00:44.150 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.037) 0:00:44.187 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.041) 0:00:44.229 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.030) 0:00:44.260 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.031) 0:00:44.291 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.033) 0:00:44.325 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.034) 0:00:44.359 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.030) 0:00:44.390 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.030) 0:00:44.421 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.029) 0:00:44.450 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.030) 0:00:44.481 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.030) 0:00:44.511 ********* ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: 
/tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.039) 0:00:44.551 ********* included: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.059) 0:00:44.611 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 19 July 2025 12:41:37 -0400 (0:00:00.061) 0:00:44.673 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 19 July 2025 12:41:38 -0400 (0:00:00.032) 0:00:44.705 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 19 July 2025 12:41:38 -0400 (0:00:00.030) 0:00:44.736 ********* skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 19 July 2025 12:41:38 -0400 (0:00:00.031) 0:00:44.767 ********* fatal: [managed-node2]: FAILED! => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result" } TASK [Debug] ******************************************************************* task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:199 Saturday 19 July 2025 12:41:38 -0400 (0:00:00.035) 0:00:44.803 ********* ok: [managed-node2] => { "changed": false, "cmd": "exec 1>&2\nset -x\nset -o pipefail\nsystemctl list-units --plain -l --all | grep quadlet || :\nsystemctl list-unit-files --all | grep quadlet || :\nsystemctl list-units --plain --failed -l --all | grep quadlet || :\n", "delta": "0:00:00.376960", "end": "2025-07-19 12:41:38.770414", "rc": 0, "start": "2025-07-19 12:41:38.393454" } STDERR: + set -o pipefail + systemctl list-units --plain -l --all + grep quadlet + : + systemctl list-unit-files --all + grep quadlet + : + systemctl list-units --plain --failed -l --all + grep quadlet + : TASK [Get journald] ************************************************************ task path: /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:209 Saturday 19 July 2025 12:41:38 -0400 (0:00:00.722) 0:00:45.526 ********* fatal: [managed-node2]: FAILED! 
=> { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.026359", "end": "2025-07-19 12:41:39.138418", "failed_when_result": true, "rc": 0, "start": "2025-07-19 12:41:39.112059" } STDOUT: -- Logs begin at Sat 2025-07-19 12:30:11 EDT, end at Sat 2025-07-19 12:41:39 EDT. -- Jul 19 12:35:33 managed-node2 platform-python[13361]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:35:34 managed-node2 platform-python[13484]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:35:37 managed-node2 platform-python[13607]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:35:39 managed-node2 platform-python[13730]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:35:42 managed-node2 dbus-daemon[591]: [system] Reloaded configuration Jul 19 12:35:42 managed-node2 dbus-daemon[591]: [system] Reloaded configuration Jul 19 12:35:42 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. -- Subject: Unit run-r58006eb3d48a46a9a552c0899f8af7ac.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit run-r58006eb3d48a46a9a552c0899f8af7ac.service has finished starting up. -- -- The start-up result is done. Jul 19 12:35:42 managed-node2 systemd[1]: Starting man-db-cache-update.service... -- Subject: Unit man-db-cache-update.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has begun starting up. Jul 19 12:35:42 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit man-db-cache-update.service has successfully entered the 'dead' state. Jul 19 12:35:42 managed-node2 systemd[1]: Started man-db-cache-update.service. -- Subject: Unit man-db-cache-update.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has finished starting up. -- -- The start-up result is done. 
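NOTE: the "Get journald" task that produced this journal dump reports FAILED even though journalctl returned rc 0; the test forces the failure (see "failed_when_result": true above) so that the full journal is always printed after the fatal "Manage each secret" error. A minimal sketch of that pattern, assuming a simple always-fail condition rather than the test's actual expression:

    # Dump the journal and deliberately fail so the output is surfaced in CI.
    - name: Get journald
      command: journalctl -ex
      changed_when: false
      failed_when: true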
Jul 19 12:35:42 managed-node2 systemd[1]: run-r58006eb3d48a46a9a552c0899f8af7ac.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-r58006eb3d48a46a9a552c0899f8af7ac.service has successfully entered the 'dead' state. Jul 19 12:35:43 managed-node2 platform-python[14335]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 19 12:35:43 managed-node2 platform-python[14483]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:35:45 managed-node2 platform-python[14607]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 19 12:35:46 managed-node2 kernel: SELinux: Converting 460 SID table entries... Jul 19 12:35:46 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jul 19 12:35:46 managed-node2 kernel: SELinux: policy capability open_perms=1 Jul 19 12:35:46 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jul 19 12:35:46 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jul 19 12:35:46 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jul 19 12:35:46 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jul 19 12:35:46 managed-node2 dbus-daemon[591]: [system] Reloaded configuration Jul 19 12:35:47 managed-node2 platform-python[14734]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 19 12:35:51 managed-node2 platform-python[14857]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:35:53 managed-node2 platform-python[14982]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:35:54 managed-node2 platform-python[15105]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:35:54 managed-node2 platform-python[15228]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 19 12:35:54 managed-node2 platform-python[15327]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752942954.3752043-9946-72044176595742/source _original_basename=tmpi7ylefvg follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None 
regexp=None delimiter=None Jul 19 12:35:55 managed-node2 platform-python[15452]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:35:55 managed-node2 kernel: evm: overlay not supported Jul 19 12:35:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck103626253-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-metacopy\x2dcheck103626253-merged.mount has successfully entered the 'dead' state. Jul 19 12:35:55 managed-node2 systemd[1]: Created slice machine.slice. -- Subject: Unit machine.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine.slice has finished starting up. -- -- The start-up result is done. Jul 19 12:35:55 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice. -- Subject: Unit machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice has finished starting up. -- -- The start-up result is done. Jul 19 12:35:56 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
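NOTE: the ansible-containers.podman.podman_play entries above (state=created, kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml) are the podman role replaying a Kubernetes YAML through podman. A minimal sketch of the role input that drives this code path; the spec keys are assumed from the role's documented interface, and only the file name comes from this log:

    # Hand a kube YAML to the podman role; it copies the file under
    # /etc/containers/ansible-kubernetes.d/ and deploys it via podman.
    - hosts: all
      vars:
        podman_kube_specs:
          - state: created
            kube_file_src: nopull.yml
      roles:
        - fedora.linux_system_roles.podman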
Jul 19 12:36:00 managed-node2 platform-python[15778]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:36:01 managed-node2 platform-python[15907]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:36:04 managed-node2 platform-python[16032]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:36:07 managed-node2 platform-python[16155]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 19 12:36:08 managed-node2 platform-python[16282]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 19 12:36:09 managed-node2 platform-python[16409]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 19 12:36:10 managed-node2 platform-python[16532]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:36:13 managed-node2 platform-python[16655]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:36:16 managed-node2 platform-python[16778]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False 
validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:36:19 managed-node2 platform-python[16901]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 19 12:36:21 managed-node2 platform-python[17049]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 19 12:36:22 managed-node2 platform-python[17172]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 19 12:36:26 managed-node2 platform-python[17295]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:36:28 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 19 12:36:28 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 19 12:36:29 managed-node2 platform-python[17558]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:36:29 managed-node2 platform-python[17681]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:36:29 managed-node2 platform-python[17804]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 19 12:36:30 managed-node2 platform-python[17903]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752942989.6065838-11409-115866441513393/source _original_basename=tmpinaqg9cl follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Jul 19 12:36:30 managed-node2 platform-python[18028]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None 
quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:36:30 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice. -- Subject: Unit machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice has finished starting up. -- -- The start-up result is done. Jul 19 12:36:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 19 12:36:34 managed-node2 platform-python[18315]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:36:35 managed-node2 platform-python[18444]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:36:37 managed-node2 platform-python[18569]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:36:40 managed-node2 platform-python[18692]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 19 12:36:41 managed-node2 platform-python[18819]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 19 12:36:41 managed-node2 platform-python[18946]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 19 12:36:43 managed-node2 platform-python[19069]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:36:46 managed-node2 platform-python[19192]: ansible-dnf Invoked with name=['grubby'] 
state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:36:49 managed-node2 platform-python[19315]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:36:52 managed-node2 platform-python[19438]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 19 12:36:54 managed-node2 platform-python[19586]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 19 12:36:54 managed-node2 platform-python[19709]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 19 12:36:59 managed-node2 platform-python[19832]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:37:00 managed-node2 platform-python[19957]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:37:01 managed-node2 platform-python[20081]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 19 12:37:01 managed-node2 platform-python[20208]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:37:02 managed-node2 platform-python[20333]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:37:02 managed-node2 platform-python[20333]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/nopull.yml Jul 19 12:37:02 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice. 
-- Subject: Unit machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice has finished shutting down. Jul 19 12:37:02 managed-node2 systemd[1]: machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice: Consumed 0 CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice completed and consumed the indicated resources. Jul 19 12:37:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 19 12:37:02 managed-node2 platform-python[20471]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:37:03 managed-node2 platform-python[20594]: ansible-command Invoked with _raw_params=podman image prune -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:37:06 managed-node2 platform-python[20849]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:37:07 managed-node2 platform-python[20978]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:37:10 managed-node2 platform-python[21103]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:37:13 managed-node2 platform-python[21226]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 19 12:37:14 managed-node2 platform-python[21353]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 19 12:37:15 managed-node2 platform-python[21480]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] 
source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 19 12:37:16 managed-node2 platform-python[21603]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:37:19 managed-node2 platform-python[21726]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:37:22 managed-node2 platform-python[21849]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:37:25 managed-node2 platform-python[21972]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 19 12:37:27 managed-node2 platform-python[22120]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 19 12:37:28 managed-node2 platform-python[22243]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 19 12:37:32 managed-node2 platform-python[22366]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:37:34 managed-node2 platform-python[22491]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:37:34 managed-node2 platform-python[22615]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 19 12:37:35 managed-node2 platform-python[22742]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:37:35 managed-node2 platform-python[22867]: ansible-containers.podman.podman_play Invoked with state=absent 
kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:37:35 managed-node2 platform-python[22867]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/bogus.yml Jul 19 12:37:35 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice. -- Subject: Unit machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice has finished shutting down. Jul 19 12:37:35 managed-node2 systemd[1]: machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice: Consumed 0 CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice completed and consumed the indicated resources. Jul 19 12:37:35 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
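NOTE: this block is the mirror-image teardown: podman_play runs with state=absent for the bogus.yml spec, systemd removes the pod's cgroup slice, and just below the kube file itself is deleted and dangling images are pruned. The same cleanup, sketched as standalone tasks with the paths taken from this log:

    # Tear down the pod, remove its kube file, and prune leftover images.
    - hosts: all
      tasks:
        - containers.podman.podman_play:
            kube_file: /etc/containers/ansible-kubernetes.d/bogus.yml
            state: absent
        - file:
            path: /etc/containers/ansible-kubernetes.d/bogus.yml
            state: absent
        - command: podman image prune -f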
Jul 19 12:37:36 managed-node2 platform-python[23006]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:37:36 managed-node2 platform-python[23129]: ansible-command Invoked with _raw_params=podman image prune -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:37:40 managed-node2 platform-python[23384]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:37:41 managed-node2 platform-python[23513]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:37:44 managed-node2 platform-python[23638]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:37:47 managed-node2 platform-python[23761]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 19 12:37:47 managed-node2 platform-python[23888]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 19 12:37:48 managed-node2 platform-python[24015]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 19 12:37:50 managed-node2 platform-python[24138]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:37:52 managed-node2 platform-python[24261]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False 
autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:37:55 managed-node2 platform-python[24384]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:37:58 managed-node2 platform-python[24507]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 19 12:38:00 managed-node2 platform-python[24655]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 19 12:38:01 managed-node2 platform-python[24778]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 19 12:38:05 managed-node2 platform-python[24901]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jul 19 12:38:06 managed-node2 platform-python[25025]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:38:06 managed-node2 platform-python[25150]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:06 managed-node2 platform-python[25274]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:08 managed-node2 platform-python[25398]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:08 managed-node2 platform-python[25522]: ansible-command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jul 19 12:38:08 managed-node2 systemd[1]: Created slice User Slice of UID 3001. -- Subject: Unit user-3001.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-3001.slice has finished starting up. -- -- The start-up result is done. Jul 19 12:38:08 managed-node2 systemd[1]: Starting User runtime directory /run/user/3001... -- Subject: Unit user-runtime-dir@3001.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@3001.service has begun starting up. 
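NOTE: the sequence above is the rootless-user setup: the role resolves podman_basic_user via getent, verifies its subuid/subgid ranges with getsubids, then enables lingering so the user's systemd instance (user@3001.service) keeps running without an open session. The same steps as standalone tasks; the user name and linger path are taken from this log:

    # Check subid ranges, then enable linger (idempotent via creates:).
    - hosts: all
      tasks:
        - command: getsubids podman_basic_user
        - command: getsubids -g podman_basic_user
        - command: loginctl enable-linger podman_basic_user
          args:
            creates: /var/lib/systemd/linger/podman_basic_user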
Jul 19 12:38:08 managed-node2 systemd[1]: Started User runtime directory /run/user/3001.
-- Subject: Unit user-runtime-dir@3001.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit user-runtime-dir@3001.service has finished starting up.
--
-- The start-up result is done.
Jul 19 12:38:08 managed-node2 systemd[1]: Starting User Manager for UID 3001...
-- Subject: Unit user@3001.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit user@3001.service has begun starting up.
Jul 19 12:38:08 managed-node2 systemd[25528]: pam_unix(systemd-user:session): session opened for user podman_basic_user by (uid=0)
Jul 19 12:38:08 managed-node2 systemd[25528]: Reached target Paths.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Jul 19 12:38:08 managed-node2 systemd[25528]: Starting D-Bus User Message Bus Socket.
-- Subject: Unit UNIT has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has begun starting up.
Jul 19 12:38:08 managed-node2 systemd[25528]: Started Mark boot as successful after the user session has run 2 minutes.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Jul 19 12:38:08 managed-node2 systemd[25528]: Reached target Timers.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Jul 19 12:38:08 managed-node2 systemd[25528]: Listening on D-Bus User Message Bus Socket.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Jul 19 12:38:08 managed-node2 systemd[25528]: Reached target Sockets.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Jul 19 12:38:08 managed-node2 systemd[25528]: Reached target Basic System.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Jul 19 12:38:08 managed-node2 systemd[25528]: Reached target Default.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Jul 19 12:38:08 managed-node2 systemd[25528]: Startup finished in 28ms.
-- Subject: User manager start-up is now complete
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The user manager instance for user 3001 has been started. All services queued
-- for starting have been started. Note that other services might still be starting
-- up or be started at any later time.
--
-- Startup of the manager took 28712 microseconds.
Jul 19 12:38:08 managed-node2 systemd[1]: Started User Manager for UID 3001.
-- Subject: Unit user@3001.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit user@3001.service has finished starting up.
--
-- The start-up result is done.
Jul 19 12:38:09 managed-node2 platform-python[25663]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 19 12:38:10 managed-node2 platform-python[25786]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 19 12:38:10 managed-node2 sudo[25909]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ziadsxoqzpztrsztgsdmjtfdqrqqbghq ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943090.202407-15734-110385486361950/AnsiballZ_podman_image.py'
Jul 19 12:38:10 managed-node2 sudo[25909]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Jul 19 12:38:10 managed-node2 systemd[25528]: Started D-Bus User Message Bus.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Jul 19 12:38:10 managed-node2 systemd[25528]: Created slice user.slice.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Jul 19 12:38:10 managed-node2 systemd[25528]: Started podman-25921.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Jul 19 12:38:10 managed-node2 systemd[25528]: Started podman-pause-1458d7a0.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Jul 19 12:38:10 managed-node2 systemd[25528]: Started podman-25939.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Jul 19 12:38:11 managed-node2 systemd[25528]: Started podman-25955.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Jul 19 12:38:11 managed-node2 sudo[25909]: pam_unix(sudo:session): session closed for user podman_basic_user
Jul 19 12:38:11 managed-node2 platform-python[26084]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 19 12:38:12 managed-node2 platform-python[26207]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Jul 19 12:38:12 managed-node2 platform-python[26330]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Jul 19 12:38:13 managed-node2 platform-python[26429]: ansible-copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752943092.4331284-15846-26644570363473/source _original_basename=tmp0dg28w0o follow=False checksum=fe0b16bd085957dfbf8e2496934305469d165478 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None
Jul 19 12:38:13 managed-node2 sudo[26554]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-louqgipewhnyaovmbewiqaddtljctvmr ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943093.148976-15887-61767546226259/AnsiballZ_podman_play.py'
Jul 19 12:38:13 managed-node2 sudo[26554]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Jul 19 12:38:13 managed-node2 platform-python[26557]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Jul 19 12:38:13 managed-node2 systemd[25528]: Started podman-26565.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Jul 19 12:38:13 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6
Jul 19 12:38:13 managed-node2 systemd[25528]: Started rootless-netns-6ed4b4b3.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Jul 19 12:38:13 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this.
Jul 19 12:38:13 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vetha780888b: link is not ready
Jul 19 12:38:13 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered blocking state
Jul 19 12:38:13 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered disabled state
Jul 19 12:38:13 managed-node2 kernel: device vetha780888b entered promiscuous mode
Jul 19 12:38:13 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready
Jul 19 12:38:13 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready
Jul 19 12:38:13 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vetha780888b: link becomes ready
Jul 19 12:38:13 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered blocking state
Jul 19 12:38:13 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered forwarding state
Jul 19 12:38:14 managed-node2 dnsmasq[26752]: listening on cni-podman1(#3): 10.89.0.1
Jul 19 12:38:14 managed-node2 dnsmasq[26754]: started, version 2.79 cachesize 150
Jul 19 12:38:14 managed-node2 dnsmasq[26754]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify
Jul 19 12:38:14 managed-node2 dnsmasq[26754]: using local addresses only for domain dns.podman
Jul 19 12:38:14 managed-node2 dnsmasq[26754]: reading /etc/resolv.conf
Jul 19 12:38:14 managed-node2 dnsmasq[26754]: using local addresses only for domain dns.podman
Jul 19 12:38:14 managed-node2 dnsmasq[26754]: using nameserver 10.0.2.3#53
Jul 19 12:38:14 managed-node2 dnsmasq[26754]: using nameserver 10.29.169.13#53
Jul 19 12:38:14 managed-node2 dnsmasq[26754]: using nameserver 10.29.170.12#53
Jul 19 12:38:14 managed-node2 dnsmasq[26754]: using nameserver 10.2.32.1#53
Jul 19 12:38:14 managed-node2 dnsmasq[26754]: read /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses
Jul 19 12:38:14 managed-node2 conmon[26767]: conmon f153d4517c8778d9470c : failed to write to /proc/self/oom_score_adj: Permission denied
Jul 19 12:38:14 managed-node2 conmon[26768]: conmon f153d4517c8778d9470c : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach}
Jul 19 12:38:14 managed-node2 conmon[26768]: conmon f153d4517c8778d9470c : terminal_ctrl_fd: 14
Jul 19 12:38:14 managed-node2 conmon[26768]: conmon f153d4517c8778d9470c : winsz read side: 17, winsz write side: 18
Jul 19 12:38:14 managed-node2 conmon[26768]: conmon f153d4517c8778d9470c : container PID: 26778
Jul 19 12:38:14 managed-node2 conmon[26788]: conmon a8773b3857e3e0dd4e13 : failed to write to /proc/self/oom_score_adj: Permission denied
Jul 19 12:38:14 managed-node2 conmon[26789]: conmon a8773b3857e3e0dd4e13 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach}
Jul 19 12:38:14 managed-node2 conmon[26789]: conmon a8773b3857e3e0dd4e13 : terminal_ctrl_fd: 13
Jul 19 12:38:14 managed-node2 conmon[26789]: conmon a8773b3857e3e0dd4e13 : winsz read side: 16, winsz write side: 17
Jul 19 12:38:14 managed-node2 conmon[26789]: conmon a8773b3857e3e0dd4e13 : container PID: 26799
Jul 19 12:38:14 managed-node2 platform-python[26557]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
Jul 19 12:38:14 managed-node2 platform-python[26557]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 0c3499cd78df9c3cbd4aa0aa8e91445278eb6157f70949cc299bc9c609a42f3c Container: a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458
Jul 19 12:38:14 managed-node2 platform-python[26557]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-19T12:38:13-04:00" level=info msg="/bin/podman filtering at log level debug"
time="2025-07-19T12:38:13-04:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)"
time="2025-07-19T12:38:13-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\""
time="2025-07-19T12:38:13-04:00" level=info msg="Using sqlite as database backend"
time="2025-07-19T12:38:13-04:00" level=debug msg="Using graph driver overlay"
time="2025-07-19T12:38:13-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage"
time="2025-07-19T12:38:13-04:00" level=debug msg="Using run root /run/user/3001/containers"
time="2025-07-19T12:38:13-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod"
time="2025-07-19T12:38:13-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp"
time="2025-07-19T12:38:13-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes"
time="2025-07-19T12:38:13-04:00" level=debug msg="Using transient store: false"
time="2025-07-19T12:38:13-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Cached value indicated that overlay is supported"
time="2025-07-19T12:38:13-04:00" level=debug msg="Cached value indicated that overlay is supported"
time="2025-07-19T12:38:13-04:00" level=debug msg="Cached value indicated that metacopy is not being used"
time="2025-07-19T12:38:13-04:00" level=debug msg="Cached value indicated that native-diff is usable"
time="2025-07-19T12:38:13-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false"
time="2025-07-19T12:38:13-04:00" level=debug msg="Initializing event backend file"
time="2025-07-19T12:38:13-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument"
time="2025-07-19T12:38:13-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument"
time="2025-07-19T12:38:13-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument"
time="2025-07-19T12:38:13-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument"
time="2025-07-19T12:38:13-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument"
time="2025-07-19T12:38:13-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument"
time="2025-07-19T12:38:13-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument"
time="2025-07-19T12:38:13-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\""
time="2025-07-19T12:38:13-04:00" level=info msg="Setting parallel job count to 7"
time="2025-07-19T12:38:13-04:00" level=debug msg="Successfully loaded 1 networks"
time="2025-07-19T12:38:13-04:00" level=debug msg="found free device name cni-podman1"
time="2025-07-19T12:38:13-04:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24"
time="2025-07-19T12:38:13-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-19 12:38:13.521272 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}"
time="2025-07-19T12:38:13-04:00" level=debug msg="Successfully loaded 2 networks"
time="2025-07-19T12:38:13-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage"
time="2025-07-19T12:38:13-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }"
time="2025-07-19T12:38:13-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..."
time="2025-07-19T12:38:13-04:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" does not resolve to an image ID"
time="2025-07-19T12:38:13-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..."
time="2025-07-19T12:38:13-04:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" does not resolve to an image ID"
time="2025-07-19T12:38:13-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..."
time="2025-07-19T12:38:13-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }"
time="2025-07-19T12:38:13-04:00" level=debug msg="FROM \"scratch\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported"
time="2025-07-19T12:38:13-04:00" level=debug msg="Check for idmapped mounts support "
time="2025-07-19T12:38:13-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }"
time="2025-07-19T12:38:13-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }"
time="2025-07-19T12:38:13-04:00" level=debug msg="overlay: test mount indicated that volatile is being used"
time="2025-07-19T12:38:13-04:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345/work,userxattr,volatile,context=\"system_u:object_r:container_file_t:s0:c480,c514\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Container ID: ac8e6c0ad9d62a1134f2644b1390fd8fa36d22d0d6282cefc7edd95b4f95d64d"
time="2025-07-19T12:38:13-04:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}"
time="2025-07-19T12:38:13-04:00" level=debug msg="COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\"\", Src:[]string{\"/usr/libexec/podman/catatonit\"}, Dest:\"/catatonit\", Download:false, Chown:\"\", Chmod:\"\", Checksum:\"\", Files:[]imagebuilder.File(nil)}"
time="2025-07-19T12:38:13-04:00" level=debug msg="added content file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd"
time="2025-07-19T12:38:13-04:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT [\"/catatonit\", \"-P\"]}"
time="2025-07-19T12:38:13-04:00" level=debug msg="COMMIT localhost/podman-pause:4.9.4-dev-1708535009"
time="2025-07-19T12:38:13-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\""
time="2025-07-19T12:38:13-04:00" level=debug msg="COMMIT \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\""
time="2025-07-19T12:38:13-04:00" level=debug msg="committing image with reference \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" is allowed by policy"
time="2025-07-19T12:38:13-04:00" level=debug msg="layer list: [\"340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345\"]"
time="2025-07-19T12:38:13-04:00" level=debug msg="using \"/var/tmp/buildah2427832820\" to hold temporary data"
time="2025-07-19T12:38:13-04:00" level=debug msg="Tar with options on /home/podman_basic_user/.local/share/containers/storage/overlay/340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345/diff"
time="2025-07-19T12:38:13-04:00" level=debug msg="layer \"340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345\" size is 767488 bytes, uncompressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690, possibly-compressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690"
time="2025-07-19T12:38:13-04:00" level=debug msg="OCIv1 config = {\"created\":\"2025-07-19T16:38:13.656892898Z\",\"architecture\":\"amd64\",\"os\":\"linux\",\"config\":{\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Entrypoint\":[\"/catatonit\",\"-P\"],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"]},\"history\":[{\"created\":\"2025-07-19T16:38:13.656345599Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-07-19T16:38:13.660597339Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}"
time="2025-07-19T12:38:13-04:00" level=debug msg="OCIv1 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\":\"application/vnd.oci.image.config.v1+json\",\"digest\":\"sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\",\"size\":668},\"layers\":[{\"mediaType\":\"application/vnd.oci.image.layer.v1.tar\",\"digest\":\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\",\"size\":767488}],\"annotations\":{\"org.opencontainers.image.base.digest\":\"\",\"org.opencontainers.image.base.name\":\"\"}}"
time="2025-07-19T12:38:13-04:00" level=debug msg="Docker v2s2 config = {\"created\":\"2025-07-19T16:38:13.656892898Z\",\"container\":\"ac8e6c0ad9d62a1134f2644b1390fd8fa36d22d0d6282cefc7edd95b4f95d64d\",\"container_config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":[],\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":[],\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"architecture\":\"amd64\",\"os\":\"linux\",\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"]},\"history\":[{\"created\":\"2025-07-19T16:38:13.656345599Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-07-19T16:38:13.660597339Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}"
time="2025-07-19T12:38:13-04:00" level=debug msg="Docker v2s2 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\"config\":{\"mediaType\":\"application/vnd.docker.container.image.v1+json\",\"size\":1342,\"digest\":\"sha256:803cd64c1bc1a2e7297b3d5f520a915c581e4037aabac925fb21fc3ad8b279ee\"},\"layers\":[{\"mediaType\":\"application/vnd.docker.image.rootfs.diff.tar\",\"size\":767488,\"digest\":\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"}]}"
time="2025-07-19T12:38:13-04:00" level=debug msg="Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite"
time="2025-07-19T12:38:13-04:00" level=debug msg="IsRunningImageAllowed for image containers-storage:"
time="2025-07-19T12:38:13-04:00" level=debug msg=" Using transport \"containers-storage\" policy section "
time="2025-07-19T12:38:13-04:00" level=debug msg=" Requirement 0: allowed"
time="2025-07-19T12:38:13-04:00" level=debug msg="Overall: allowed"
time="2025-07-19T12:38:13-04:00" level=debug msg="start reading config"
time="2025-07-19T12:38:13-04:00" level=debug msg="finished reading config"
time="2025-07-19T12:38:13-04:00" level=debug msg="Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]"
time="2025-07-19T12:38:13-04:00" level=debug msg="... will first try using the original manifest unmodified"
time="2025-07-19T12:38:13-04:00" level=debug msg="Checking if we can reuse blob sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690: general substitution = true, compression for MIME type \"application/vnd.oci.image.layer.v1.tar\" = true"
time="2025-07-19T12:38:13-04:00" level=debug msg="reading layer \"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\""
time="2025-07-19T12:38:13-04:00" level=debug msg="No compression detected"
time="2025-07-19T12:38:13-04:00" level=debug msg="Using original blob without modification"
time="2025-07-19T12:38:13-04:00" level=debug msg="Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690/diff"
time="2025-07-19T12:38:13-04:00" level=debug msg="finished reading layer \"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\""
time="2025-07-19T12:38:13-04:00" level=debug msg="No compression detected"
time="2025-07-19T12:38:13-04:00" level=debug msg="Compression change for blob sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63 (\"application/vnd.oci.image.config.v1+json\") not supported"
time="2025-07-19T12:38:13-04:00" level=debug msg="Using original blob without modification"
time="2025-07-19T12:38:13-04:00" level=debug msg="setting image creation date to 2025-07-19 16:38:13.656892898 +0000 UTC"
time="2025-07-19T12:38:13-04:00" level=debug msg="created new image ID \"9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\" with metadata \"{}\""
time="2025-07-19T12:38:13-04:00" level=debug msg="added name \"localhost/podman-pause:4.9.4-dev-1708535009\" to image \"9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\""
time="2025-07-19T12:38:13-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\""
time="2025-07-19T12:38:13-04:00" level=debug msg="printing final image id \"9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Pod using bridge network mode"
time="2025-07-19T12:38:13-04:00" level=debug msg="Got pod cgroup as /libpod_parent/0c3499cd78df9c3cbd4aa0aa8e91445278eb6157f70949cc299bc9c609a42f3c"
time="2025-07-19T12:38:13-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage"
time="2025-07-19T12:38:13-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }"
time="2025-07-19T12:38:13-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..."
time="2025-07-19T12:38:13-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage"
time="2025-07-19T12:38:13-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63)"
time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Inspecting image 9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63"
time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Inspecting image 9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63"
time="2025-07-19T12:38:13-04:00" level=debug msg="Inspecting image 9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63"
time="2025-07-19T12:38:13-04:00" level=debug msg="using systemd mode: false"
time="2025-07-19T12:38:13-04:00" level=debug msg="setting container name 0c3499cd78df-infra"
time="2025-07-19T12:38:13-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Allocated lock 1 for container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c"
time="2025-07-19T12:38:13-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\""
time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Created container \"f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Container \"f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Container \"f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\" has run directory \"/run/user/3001/containers/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage"
time="2025-07-19T12:38:13-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }"
time="2025-07-19T12:38:13-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..."
time="2025-07-19T12:38:13-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage"
time="2025-07-19T12:38:13-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)"
time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)"
time="2025-07-19T12:38:13-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage"
time="2025-07-19T12:38:13-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }"
time="2025-07-19T12:38:13-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..."
time="2025-07-19T12:38:13-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage"
time="2025-07-19T12:38:13-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)"
time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage"
time="2025-07-19T12:38:13-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }"
time="2025-07-19T12:38:13-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..."
time="2025-07-19T12:38:13-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage"
time="2025-07-19T12:38:13-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)"
time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f"
time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\""
time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage"
time="2025-07-19T12:38:13-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }"
time="2025-07-19T12:38:13-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..."
time="2025-07-19T12:38:13-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage"
time="2025-07-19T12:38:13-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)"
time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f"
time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\""
time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f"
time="2025-07-19T12:38:13-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f"
time="2025-07-19T12:38:13-04:00" level=debug msg="using systemd mode: false"
time="2025-07-19T12:38:13-04:00" level=debug msg="adding container to pod httpd1"
time="2025-07-19T12:38:13-04:00" level=debug msg="setting container name httpd1-httpd1"
time="2025-07-19T12:38:13-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\""
time="2025-07-19T12:38:13-04:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host"
time="2025-07-19T12:38:13-04:00" level=debug msg="Adding mount /proc"
time="2025-07-19T12:38:13-04:00" level=debug msg="Adding mount /dev"
time="2025-07-19T12:38:13-04:00" level=debug msg="Adding mount /dev/pts"
time="2025-07-19T12:38:13-04:00" level=debug msg="Adding mount /dev/mqueue"
time="2025-07-19T12:38:13-04:00" level=debug msg="Adding mount /sys"
time="2025-07-19T12:38:13-04:00" level=debug msg="Adding mount /sys/fs/cgroup"
time="2025-07-19T12:38:13-04:00" level=debug msg="Allocated lock 2 for container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458"
time="2025-07-19T12:38:13-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\""
time="2025-07-19T12:38:13-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Created container \"a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Container \"a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Container \"a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\" has run directory \"/run/user/3001/containers/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Strongconnecting node f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c"
time="2025-07-19T12:38:13-04:00" level=debug msg="Pushed f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c onto stack"
time="2025-07-19T12:38:13-04:00" level=debug msg="Finishing node f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c. Popped f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c off stack"
time="2025-07-19T12:38:13-04:00" level=debug msg="Strongconnecting node a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458"
time="2025-07-19T12:38:13-04:00" level=debug msg="Pushed a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 onto stack"
time="2025-07-19T12:38:13-04:00" level=debug msg="Finishing node a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458. Popped a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 off stack"
time="2025-07-19T12:38:13-04:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/T3ZNBLNG2W7D2UELJU7O7YZ76X,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/127ba02fe95229d135be77cff068ce7d066eb98b87e356cce8c34501e113568e/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/127ba02fe95229d135be77cff068ce7d066eb98b87e356cce8c34501e113568e/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c330,c361\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Mounted container \"f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/127ba02fe95229d135be77cff068ce7d066eb98b87e356cce8c34501e113568e/merged\""
time="2025-07-19T12:38:13-04:00" level=debug msg="Created root filesystem for container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c at /home/podman_basic_user/.local/share/containers/storage/overlay/127ba02fe95229d135be77cff068ce7d066eb98b87e356cce8c34501e113568e/merged"
time="2025-07-19T12:38:13-04:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-06b3cc7d-4137-7077-edbe-bd7530bc2101 for container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c"
time="2025-07-19T12:38:13-04:00" level=debug msg="creating rootless network namespace with name \"rootless-netns-d22c9f230d0691b8f418\""
time="2025-07-19T12:38:13-04:00" level=debug msg="slirp4netns command: /bin/slirp4netns --disable-host-loopback --mtu=65520 --enable-sandbox --enable-seccomp --enable-ipv6 -c -r 3 --netns-type=path /run/user/3001/netns/rootless-netns-d22c9f230d0691b8f418 tap0"
time="2025-07-19T12:38:13-04:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\""
time="2025-07-19T12:38:14-04:00" level=debug msg="cni result for container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:76:19:76:c8:78:b3 Sandbox:} {Name:vetha780888b Mac:f2:ee:6b:fd:41:a0 Sandbox:} {Name:eth0 Mac:2e:67:99:01:50:2a Sandbox:/run/user/3001/netns/netns-06b3cc7d-4137-7077-edbe-bd7530bc2101}] [{Version:4 Interface:0xc000c00b08 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}"
time="2025-07-19T12:38:14-04:00" level=debug msg="rootlessport: time=\"2025-07-19T12:38:14-04:00\" level=info msg=\"Starting parent driver\"\n"
time="2025-07-19T12:38:14-04:00" level=debug msg="rootlessport: time=\"2025-07-19T12:38:14-04:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport2421233428/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport2421233428/.bp.sock]\"\n"
time="2025-07-19T12:38:14-04:00" level=debug msg="rootlessport: time=\"2025-07-19T12:38:14-04:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n"
time="2025-07-19T12:38:14-04:00" level=debug msg="rootlessport: time=\"2025-07-19T12:38:14-04:00\" level=info msg=\"Waiting for initComplete\"\n"
time="2025-07-19T12:38:14-04:00" level=debug msg="rootlessport: time=\"2025-07-19T12:38:14-04:00\" level=info msg=\"initComplete is closed; parent and child established the communication channel\"\n"
time="2025-07-19T12:38:14-04:00" level=debug msg="rootlessport: time=\"2025-07-19T12:38:14-04:00\" level=info msg=\"Exposing ports [{ 80 15001 1 tcp}]\"\n"
time="2025-07-19T12:38:14-04:00" level=debug msg="rootlessport is ready"
time="2025-07-19T12:38:14-04:00" level=debug msg="rootlessport: time=\"2025-07-19T12:38:14-04:00\" level=info msg=Ready\n"
time="2025-07-19T12:38:14-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription"
time="2025-07-19T12:38:14-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d"
time="2025-07-19T12:38:14-04:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay/127ba02fe95229d135be77cff068ce7d066eb98b87e356cce8c34501e113568e/merged\""
time="2025-07-19T12:38:14-04:00" level=debug msg="Created OCI spec for container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata/config.json"
time="2025-07-19T12:38:14-04:00" level=debug msg="Got pod cgroup as "
time="2025-07-19T12:38:14-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog"
time="2025-07-19T12:38:14-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c -u f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata -p /run/user/3001/containers/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata/pidfile -n 0c3499cd78df-infra --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c]"
time="2025-07-19T12:38:14-04:00" level=info msg="Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for blkio: mkdir /sys/fs/cgroup/blkio/libpod_parent: permission denied"
[conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied
time="2025-07-19T12:38:14-04:00" level=debug msg="Received: 26778"
time="2025-07-19T12:38:14-04:00" level=info msg="Got Conmon PID as 26768"
time="2025-07-19T12:38:14-04:00" level=debug msg="Created container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c in OCI runtime"
time="2025-07-19T12:38:14-04:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'"
time="2025-07-19T12:38:14-04:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'"
time="2025-07-19T12:38:14-04:00" level=debug msg="Starting container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c with command [/catatonit -P]"
time="2025-07-19T12:38:14-04:00" level=debug msg="Started container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c"
time="2025-07-19T12:38:14-04:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/Q6SNT2SFVF32LFZYXFZFNM34JV,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/30ccda56ccc67908370dcb7e56c6e44595e70d6af5e179ee106b4a3d90b31e0f/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/30ccda56ccc67908370dcb7e56c6e44595e70d6af5e179ee106b4a3d90b31e0f/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c330,c361\""
time="2025-07-19T12:38:14-04:00" level=debug msg="Mounted container \"a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/30ccda56ccc67908370dcb7e56c6e44595e70d6af5e179ee106b4a3d90b31e0f/merged\""
time="2025-07-19T12:38:14-04:00" level=debug msg="Created root filesystem for container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 at /home/podman_basic_user/.local/share/containers/storage/overlay/30ccda56ccc67908370dcb7e56c6e44595e70d6af5e179ee106b4a3d90b31e0f/merged"
time="2025-07-19T12:38:14-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription"
time="2025-07-19T12:38:14-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d"
time="2025-07-19T12:38:14-04:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount"
time="2025-07-19T12:38:14-04:00" level=debug msg="Created OCI spec for container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata/config.json"
time="2025-07-19T12:38:14-04:00" level=debug msg="Got pod cgroup as "
time="2025-07-19T12:38:14-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog"
time="2025-07-19T12:38:14-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 -u a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata -p /run/user/3001/containers/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458]"
time="2025-07-19T12:38:14-04:00" level=info msg="Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for cpu: mkdir /sys/fs/cgroup/cpu/conmon: permission denied"
[conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied
time="2025-07-19T12:38:14-04:00" level=debug msg="Received: 26799"
time="2025-07-19T12:38:14-04:00" level=info msg="Got Conmon PID as 26789"
time="2025-07-19T12:38:14-04:00" level=debug msg="Created container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 in OCI runtime"
time="2025-07-19T12:38:14-04:00" level=debug msg="Starting container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 with command [/bin/busybox-extras httpd -f -p 80]"
time="2025-07-19T12:38:14-04:00" level=debug msg="Started container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458"
time="2025-07-19T12:38:14-04:00" level=debug msg="Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)"
time="2025-07-19T12:38:14-04:00" level=debug msg="Shutting down engines"
Jul 19 12:38:14 managed-node2 platform-python[26557]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0
Jul 19 12:38:14 managed-node2 sudo[26554]: pam_unix(sudo:session): session closed for user podman_basic_user
Jul 19 12:38:14 managed-node2 sudo[26930]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lmrmjlkkbsynluhmnwsuczlkvrvxmsws ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943094.4993763-15928-49906019321868/AnsiballZ_systemd.py'
Jul 19 12:38:14 managed-node2 sudo[26930]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Jul 19 12:38:14 managed-node2 platform-python[26933]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Jul 19 12:38:14 managed-node2 systemd[25528]: Reloading.
Jul 19 12:38:14 managed-node2 sudo[26930]: pam_unix(sudo:session): session closed for user podman_basic_user
Jul 19 12:38:15 managed-node2 dnsmasq[26754]: listening on cni-podman1(#3): fe80::7419:76ff:fec8:78b3%cni-podman1
Jul 19 12:38:15 managed-node2 sudo[27068]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gajwmakwkbshdhvfjxukfbkcepplsltd ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943095.0683737-15953-10632554991967/AnsiballZ_systemd.py'
Jul 19 12:38:15 managed-node2 sudo[27068]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Jul 19 12:38:15 managed-node2 platform-python[27071]: ansible-systemd Invoked with name= scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None
Jul 19 12:38:15 managed-node2 systemd[25528]: Reloading.
Jul 19 12:38:15 managed-node2 sudo[27068]: pam_unix(sudo:session): session closed for user podman_basic_user
Jul 19 12:38:15 managed-node2 sudo[27207]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dcmefbhxtsrhgtubtgzkfqommjfyuibn ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943095.7285938-15986-176076416334674/AnsiballZ_systemd.py'
Jul 19 12:38:15 managed-node2 sudo[27207]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Jul 19 12:38:16 managed-node2 platform-python[27210]: ansible-systemd Invoked with name= scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None
Jul 19 12:38:16 managed-node2 systemd[25528]: Created slice podman\x2dkube.slice.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Jul 19 12:38:16 managed-node2 systemd[25528]: Starting A template for running K8s workloads via podman-kube-play...
-- Subject: Unit UNIT has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has begun starting up.
Jul 19 12:38:16 managed-node2 conmon[26789]: conmon a8773b3857e3e0dd4e13 : container 26799 exited with status 137 Jul 19 12:38:16 managed-node2 conmon[26768]: conmon f153d4517c8778d9470c : container 26778 exited with status 137 Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458)" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=info msg="Using sqlite as database backend" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using graph driver overlay" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using run root /run/user/3001/containers" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using transient store: false" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Cached value indicated that metacopy is not being used" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Cached value indicated that native-diff is usable" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Initializing event backend file" Jul 19 12:38:16 managed-node2 
/usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=info msg="Setting parallel job count to 7" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c)" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=info msg="Using sqlite as database backend" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using graph driver overlay" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using run root /run/user/3001/containers" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jul 19 12:38:16 
managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using transient store: false" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Cached value indicated that metacopy is not being used" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Cached value indicated that native-diff is usable" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Initializing event backend file" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=info msg="Setting parallel job count to 7" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman 
--root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458)" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time="2025-07-19T12:38:16-04:00" level=debug msg="Shutting down engines" Jul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered disabled state Jul 19 12:38:16 managed-node2 kernel: device vetha780888b left promiscuous mode Jul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered disabled state Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c)" Jul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time="2025-07-19T12:38:16-04:00" level=debug msg="Shutting down engines" Jul 19 12:38:16 managed-node2 podman[27216]: Pods stopped: Jul 19 12:38:16 managed-node2 podman[27216]: 0c3499cd78df9c3cbd4aa0aa8e91445278eb6157f70949cc299bc9c609a42f3c Jul 19 12:38:16 managed-node2 podman[27216]: Pods removed: Jul 19 12:38:16 managed-node2 podman[27216]: 0c3499cd78df9c3cbd4aa0aa8e91445278eb6157f70949cc299bc9c609a42f3c Jul 19 12:38:16 managed-node2 podman[27216]: Secrets removed: Jul 19 12:38:16 managed-node2 podman[27216]: Volumes removed: Jul 19 12:38:16 managed-node2 systemd[25528]: Started rootless-netns-1ff27aec.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
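Exit status 137 in the conmon lines above is 128+9: the httpd containers were killed with SIGKILL while the pod created earlier by the podman_play task was torn down and replaced by the systemd-managed instance (busybox httpd does not exit on SIGTERM, so the stop escalates to SIGKILL). What the template instance actually executes is version-dependent, so inspect it rather than assume:

systemctl --user cat podman-kube@.service
# ExecStart is typically a "podman play kube --replace ... %I" line, which is
# what removes the old pod ("Pods stopped"/"Pods removed" above) before
# starting a fresh one under the podman-kube slice.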
Jul 19 12:38:16 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth24653eaf: link is not ready Jul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered blocking state Jul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered disabled state Jul 19 12:38:16 managed-node2 kernel: device veth24653eaf entered promiscuous mode Jul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered blocking state Jul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered forwarding state Jul 19 12:38:16 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth24653eaf: link becomes ready Jul 19 12:38:16 managed-node2 dnsmasq[27465]: listening on cni-podman1(#3): 10.89.0.1 Jul 19 12:38:16 managed-node2 dnsmasq[27467]: started, version 2.79 cachesize 150 Jul 19 12:38:16 managed-node2 dnsmasq[27467]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify Jul 19 12:38:16 managed-node2 dnsmasq[27467]: using local addresses only for domain dns.podman Jul 19 12:38:16 managed-node2 dnsmasq[27467]: reading /etc/resolv.conf Jul 19 12:38:16 managed-node2 dnsmasq[27467]: using local addresses only for domain dns.podman Jul 19 12:38:16 managed-node2 dnsmasq[27467]: using nameserver 10.0.2.3#53 Jul 19 12:38:16 managed-node2 dnsmasq[27467]: using nameserver 10.29.169.13#53 Jul 19 12:38:16 managed-node2 dnsmasq[27467]: using nameserver 10.29.170.12#53 Jul 19 12:38:16 managed-node2 dnsmasq[27467]: using nameserver 10.2.32.1#53 Jul 19 12:38:16 managed-node2 dnsmasq[27467]: read /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Jul 19 12:38:17 managed-node2 podman[27216]: Pod: Jul 19 12:38:17 managed-node2 podman[27216]: 2d1d21944d4ca561d0a237f029182068a925bb49513d0d920c7ad527b530a87a Jul 19 12:38:17 managed-node2 podman[27216]: Container: Jul 19 12:38:17 managed-node2 podman[27216]: fbdb7144dbaf3a0b80484872c9bcae1ed8f6a793661386bc91aa084464c69027 Jul 19 12:38:17 managed-node2 systemd[25528]: Started A template for running K8s workloads via podman-kube-play. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
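The dnsmasq instances here are spawned by the CNI dnsname plugin: each podman network gets its own dnsmasq serving the dns.podman domain from an addnhosts file that maps pod names to their addresses (the "read ... addnhosts - 1 addresses" line above). A speculative check follows; the hostname depends on the pod name in the kube file, so treat it as an assumed example:

cat /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts
dig +short @10.89.0.1 httpd1.dns.podman   # assumed pod hostname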
Jul 19 12:38:17 managed-node2 sudo[27207]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:38:17 managed-node2 platform-python[27643]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 19 12:38:18 managed-node2 platform-python[27767]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:38:18 managed-node2 dnsmasq[27467]: listening on cni-podman1(#3): fe80::f826:e2ff:fec6:eea3%cni-podman1 Jul 19 12:38:19 managed-node2 platform-python[27892]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:20 managed-node2 platform-python[28016]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:38:21 managed-node2 platform-python[28139]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:38:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 19 12:38:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
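The stat of /usr/bin/getsubids above appears to be how the role probes for subordinate ID support before running rootless containers: use the shadow-utils helper when it exists, otherwise read the flat files. By hand this is roughly:

# Sketch of the subuid/subgid check, assuming the helper is preferred
if [ -x /usr/bin/getsubids ]; then
    getsubids podman_basic_user      # e.g. "0: podman_basic_user 100000 65536"
    getsubids -g podman_basic_user   # same for subgids
else
    grep '^podman_basic_user:' /etc/subuid /etc/subgid
fi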
Jul 19 12:38:22 managed-node2 platform-python[28430]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:38:23 managed-node2 platform-python[28553]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:38:23 managed-node2 platform-python[28676]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 19 12:38:23 managed-node2 platform-python[28775]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752943103.1443834-16356-64021954424990/source _original_basename=tmp0hh2oj3u follow=False checksum=b06d991e561d2233cf906d852db9b578dc61ce26 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Jul 19 12:38:24 managed-node2 platform-python[28900]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:38:24 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice. -- Subject: Unit machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice has finished starting up. -- -- The start-up result is done. Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.3685] manager: (cni-podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.3718] manager: (vethf4165b4a): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Jul 19 12:38:24 managed-node2 systemd-udevd[28949]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. Jul 19 12:38:24 managed-node2 systemd-udevd[28949]: Could not generate persistent MAC address for vethf4165b4a: No such file or directory Jul 19 12:38:24 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vethf4165b4a: link is not ready Jul 19 12:38:24 managed-node2 systemd-udevd[28948]: Using default interface naming scheme 'rhel-8.0'. 
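From the pod name, container name, image, command and workdir that surface in the debug output below, httpd2.yml plausibly looks like the sketch here. This is a reconstruction, not the file's verbatim contents; in particular the hostPath volume is only inferred from the /tmp/lsr_s72m6vdk_podman/httpd2 directory created above and the later "Workdir /var/www resolved to a volume or mount" message:

# Reconstructed sketch of the kube file (details inferred from the log)
cat <<'EOF'
apiVersion: v1
kind: Pod
metadata:
  name: httpd2
spec:
  containers:
    - name: httpd2                       # podman names it httpd2-httpd2
      image: quay.io/libpod/testimage:20210610
      command: ["/bin/busybox-extras", "httpd", "-f", "-p", "80"]
      workingDir: /var/www
      volumeMounts:
        - name: www                      # inferred, see note above
          mountPath: /var/www
  volumes:
    - name: www
      hostPath:
        path: /tmp/lsr_s72m6vdk_podman/httpd2
EOF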
Jul 19 12:38:24 managed-node2 systemd-udevd[28948]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. Jul 19 12:38:24 managed-node2 systemd-udevd[28948]: Could not generate persistent MAC address for cni-podman1: No such file or directory Jul 19 12:38:24 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered blocking state Jul 19 12:38:24 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered disabled state Jul 19 12:38:24 managed-node2 kernel: device vethf4165b4a entered promiscuous mode Jul 19 12:38:24 managed-node2 dbus-daemon[591]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.5' (uid=0 pid=661 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0") Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4140] device (cni-podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', sys-iface-state: 'external') Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4145] device (cni-podman1): state change: unavailable -> disconnected (reason 'connection-assumed', sys-iface-state: 'external') Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4153] device (cni-podman1): Activation: starting connection 'cni-podman1' (288926ec-c137-47aa-80eb-b1812c1bfed2) Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4154] device (cni-podman1): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'external') Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4157] device (cni-podman1): state change: prepare -> config (reason 'none', sys-iface-state: 'external') Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4159] device (cni-podman1): state change: config -> ip-config (reason 'none', sys-iface-state: 'external') Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4160] device (cni-podman1): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'external') Jul 19 12:38:24 managed-node2 systemd[1]: Starting Network Manager Script Dispatcher Service... -- Subject: Unit NetworkManager-dispatcher.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has begun starting up. Jul 19 12:38:24 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready Jul 19 12:38:24 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready Jul 19 12:38:24 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethf4165b4a: link becomes ready Jul 19 12:38:24 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered blocking state Jul 19 12:38:24 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered forwarding state Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4643] device (vethf4165b4a): carrier: link connected Jul 19 12:38:24 managed-node2 dbus-daemon[591]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher' Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4647] device (cni-podman1): carrier: link connected Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4663] device (cni-podman1): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'external') Jul 19 12:38:24 managed-node2 systemd[1]: Started Network Manager Script Dispatcher Service. 
-- Subject: Unit NetworkManager-dispatcher.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has finished starting up. -- -- The start-up result is done. Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4665] device (cni-podman1): state change: secondaries -> activated (reason 'none', sys-iface-state: 'external') Jul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4669] device (cni-podman1): Activation: successful, device activated. Jul 19 12:38:24 managed-node2 dnsmasq[29070]: listening on cni-podman1(#3): 10.89.0.1 Jul 19 12:38:24 managed-node2 dnsmasq[29074]: started, version 2.79 cachesize 150 Jul 19 12:38:24 managed-node2 dnsmasq[29074]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify Jul 19 12:38:24 managed-node2 dnsmasq[29074]: using local addresses only for domain dns.podman Jul 19 12:38:24 managed-node2 dnsmasq[29074]: reading /etc/resolv.conf Jul 19 12:38:24 managed-node2 dnsmasq[29074]: using local addresses only for domain dns.podman Jul 19 12:38:24 managed-node2 dnsmasq[29074]: using nameserver 10.29.169.13#53 Jul 19 12:38:24 managed-node2 dnsmasq[29074]: using nameserver 10.29.170.12#53 Jul 19 12:38:24 managed-node2 dnsmasq[29074]: using nameserver 10.2.32.1#53 Jul 19 12:38:24 managed-node2 dnsmasq[29074]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Jul 19 12:38:24 managed-node2 systemd[1]: Started libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope. -- Subject: Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:24 managed-node2 conmon[29076]: conmon c67c85c8cb2aae9cc679 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Jul 19 12:38:24 managed-node2 conmon[29076]: conmon c67c85c8cb2aae9cc679 : terminal_ctrl_fd: 13 Jul 19 12:38:24 managed-node2 conmon[29076]: conmon c67c85c8cb2aae9cc679 : winsz read side: 17, winsz write side: 18 Jul 19 12:38:24 managed-node2 systemd[1]: Started libcontainer container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c. -- Subject: Unit libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:24 managed-node2 conmon[29076]: conmon c67c85c8cb2aae9cc679 : container PID: 29087 Jul 19 12:38:24 managed-node2 systemd[1]: Started libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope. -- Subject: Unit libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has finished starting up. -- -- The start-up result is done. 
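Note the recurring sys-iface-state: 'external' in the NetworkManager lines: NM did not configure cni-podman1, it merely assumed a connection for a bridge that CNI created and addressed, and walks it through the state machine without touching it. To confirm NM is only tracking the device:

nmcli -f GENERAL.DEVICE,GENERAL.STATE device show cni-podman1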
Jul 19 12:38:24 managed-node2 conmon[29098]: conmon add9038e274085c173b1 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach} Jul 19 12:38:24 managed-node2 conmon[29098]: conmon add9038e274085c173b1 : terminal_ctrl_fd: 12 Jul 19 12:38:24 managed-node2 conmon[29098]: conmon add9038e274085c173b1 : winsz read side: 16, winsz write side: 17 Jul 19 12:38:24 managed-node2 systemd[1]: Started libcontainer container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f. -- Subject: Unit libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:24 managed-node2 conmon[29098]: conmon add9038e274085c173b1 : container PID: 29108 Jul 19 12:38:24 managed-node2 platform-python[28900]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 19 12:38:24 managed-node2 platform-python[28900]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816 Container: add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f Jul 19 12:38:24 managed-node2 platform-python[28900]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-19T12:38:24-04:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2025-07-19T12:38:24-04:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-07-19T12:38:24-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-07-19T12:38:24-04:00" level=info msg="Using sqlite as database backend" time="2025-07-19T12:38:24-04:00" level=debug msg="Using graph driver overlay" time="2025-07-19T12:38:24-04:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Using run root /run/containers/storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2025-07-19T12:38:24-04:00" level=debug msg="Using tmp dir /run/libpod" time="2025-07-19T12:38:24-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2025-07-19T12:38:24-04:00" level=debug msg="Using transient store: false" time="2025-07-19T12:38:24-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-19T12:38:24-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-19T12:38:24-04:00" level=debug msg="Cached value indicated that metacopy is being used" time="2025-07-19T12:38:24-04:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2025-07-19T12:38:24-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2025-07-19T12:38:24-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2025-07-19T12:38:24-04:00" level=debug msg="Initializing event backend file" time="2025-07-19T12:38:24-04:00" 
level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-07-19T12:38:24-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-07-19T12:38:24-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-07-19T12:38:24-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-07-19T12:38:24-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-07-19T12:38:24-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-07-19T12:38:24-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-07-19T12:38:24-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" time="2025-07-19T12:38:24-04:00" level=info msg="Setting parallel job count to 7" time="2025-07-19T12:38:24-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-19 12:35:55.640649556 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-07-19T12:38:24-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-07-19T12:38:24-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:24-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." 
time="2025-07-19T12:38:24-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a)" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Pod using bridge network mode" time="2025-07-19T12:38:24-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice for parent machine.slice and name libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816" time="2025-07-19T12:38:24-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice" time="2025-07-19T12:38:24-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice" time="2025-07-19T12:38:24-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:24-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." 
time="2025-07-19T12:38:24-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a)" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Inspecting image fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Inspecting image fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a" time="2025-07-19T12:38:24-04:00" level=debug msg="Inspecting image fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a" time="2025-07-19T12:38:24-04:00" level=debug msg="using systemd mode: false" time="2025-07-19T12:38:24-04:00" level=debug msg="setting container name f8000a88fe4a-infra" time="2025-07-19T12:38:24-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Allocated lock 1 for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c" time="2025-07-19T12:38:24-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2025-07-19T12:38:24-04:00" level=debug msg="Check for idmapped mounts support " time="2025-07-19T12:38:24-04:00" level=debug msg="Created container \"c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Container \"c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\" has work directory \"/var/lib/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Container \"c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\" has run directory \"/run/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:24-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" 
..." time="2025-07-19T12:38:24-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-07-19T12:38:24-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:24-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-19T12:38:24-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:24-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-07-19T12:38:24-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:38:24-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-19T12:38:24-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-19T12:38:24-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-19T12:38:24-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-19T12:38:24-04:00" level=debug msg="using systemd mode: false" time="2025-07-19T12:38:24-04:00" level=debug msg="adding container to pod httpd2" time="2025-07-19T12:38:24-04:00" level=debug msg="setting container name httpd2-httpd2" 
time="2025-07-19T12:38:24-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-07-19T12:38:24-04:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-07-19T12:38:24-04:00" level=debug msg="Adding mount /proc" time="2025-07-19T12:38:24-04:00" level=debug msg="Adding mount /dev" time="2025-07-19T12:38:24-04:00" level=debug msg="Adding mount /dev/pts" time="2025-07-19T12:38:24-04:00" level=debug msg="Adding mount /dev/mqueue" time="2025-07-19T12:38:24-04:00" level=debug msg="Adding mount /sys" time="2025-07-19T12:38:24-04:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-07-19T12:38:24-04:00" level=debug msg="Allocated lock 2 for container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f" time="2025-07-19T12:38:24-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Created container \"add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Container \"add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\" has work directory \"/var/lib/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Container \"add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\" has run directory \"/run/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Strongconnecting node c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c" time="2025-07-19T12:38:24-04:00" level=debug msg="Pushed c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c onto stack" time="2025-07-19T12:38:24-04:00" level=debug msg="Finishing node c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c. Popped c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c off stack" time="2025-07-19T12:38:24-04:00" level=debug msg="Strongconnecting node add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f" time="2025-07-19T12:38:24-04:00" level=debug msg="Pushed add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f onto stack" time="2025-07-19T12:38:24-04:00" level=debug msg="Finishing node add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f. 
Popped add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f off stack" time="2025-07-19T12:38:24-04:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/HXWSEHVDVE6HABOKZ6B2SSNLKD,upperdir=/var/lib/containers/storage/overlay/6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e/diff,workdir=/var/lib/containers/storage/overlay/6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c723,c1018\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Mounted container \"c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\" at \"/var/lib/containers/storage/overlay/6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e/merged\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Created root filesystem for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c at /var/lib/containers/storage/overlay/6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e/merged" time="2025-07-19T12:38:24-04:00" level=debug msg="Made network namespace at /run/netns/netns-f67fee73-2bbe-5ce9-31b0-8129b0eb7f47 for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c" time="2025-07-19T12:38:24-04:00" level=debug msg="cni result for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:ee:2d:d5:97:9a:6b Sandbox:} {Name:vethf4165b4a Mac:d2:23:54:53:0f:5f Sandbox:} {Name:eth0 Mac:ea:eb:9c:fe:80:d8 Sandbox:/run/netns/netns-f67fee73-2bbe-5ce9-31b0-8129b0eb7f47}] [{Version:4 Interface:0xc0005a9428 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}" time="2025-07-19T12:38:24-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2025-07-19T12:38:24-04:00" level=debug msg="Setting Cgroups for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c to machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice:libpod:c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c" time="2025-07-19T12:38:24-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-07-19T12:38:24-04:00" level=debug msg="Workdir \"/\" resolved to host path \"/var/lib/containers/storage/overlay/6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e/merged\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Created OCI spec for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c at /var/lib/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata/config.json" time="2025-07-19T12:38:24-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice for parent machine.slice and name libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816" time="2025-07-19T12:38:24-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice" time="2025-07-19T12:38:24-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice" time="2025-07-19T12:38:24-04:00" level=debug msg="/usr/bin/conmon messages will be logged 
to syslog" time="2025-07-19T12:38:24-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c -u c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata -p /run/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata/pidfile -n f8000a88fe4a-infra --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c]" time="2025-07-19T12:38:24-04:00" level=info msg="Running conmon under slice machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice and unitName libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope" time="2025-07-19T12:38:24-04:00" level=debug msg="Received: 29087" time="2025-07-19T12:38:24-04:00" level=info msg="Got Conmon PID as 29076" time="2025-07-19T12:38:24-04:00" level=debug msg="Created container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c in OCI runtime" time="2025-07-19T12:38:24-04:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-07-19T12:38:24-04:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-07-19T12:38:24-04:00" level=debug msg="Starting container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c with command [/catatonit -P]" time="2025-07-19T12:38:24-04:00" level=debug msg="Started container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c" time="2025-07-19T12:38:24-04:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/HJUTFIUMULI3FBOA3A6VGXTPPL,upperdir=/var/lib/containers/storage/overlay/ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45/diff,workdir=/var/lib/containers/storage/overlay/ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c723,c1018\"" 
time="2025-07-19T12:38:24-04:00" level=debug msg="Mounted container \"add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\" at \"/var/lib/containers/storage/overlay/ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45/merged\"" time="2025-07-19T12:38:24-04:00" level=debug msg="Created root filesystem for container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f at /var/lib/containers/storage/overlay/ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45/merged" time="2025-07-19T12:38:24-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2025-07-19T12:38:24-04:00" level=debug msg="Setting Cgroups for container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f to machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice:libpod:add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f" time="2025-07-19T12:38:24-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-07-19T12:38:24-04:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-07-19T12:38:24-04:00" level=debug msg="Created OCI spec for container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f at /var/lib/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata/config.json" time="2025-07-19T12:38:24-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice for parent machine.slice and name libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816" time="2025-07-19T12:38:24-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice" time="2025-07-19T12:38:24-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice" time="2025-07-19T12:38:24-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-07-19T12:38:24-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f -u add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata -p /run/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg 
--volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f]" time="2025-07-19T12:38:24-04:00" level=info msg="Running conmon under slice machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice and unitName libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope" time="2025-07-19T12:38:24-04:00" level=debug msg="Received: 29108" time="2025-07-19T12:38:24-04:00" level=info msg="Got Conmon PID as 29098" time="2025-07-19T12:38:24-04:00" level=debug msg="Created container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f in OCI runtime" time="2025-07-19T12:38:24-04:00" level=debug msg="Starting container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f with command [/bin/busybox-extras httpd -f -p 80]" time="2025-07-19T12:38:24-04:00" level=debug msg="Started container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f" time="2025-07-19T12:38:24-04:00" level=debug msg="Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-07-19T12:38:24-04:00" level=debug msg="Shutting down engines" Jul 19 12:38:24 managed-node2 platform-python[28900]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jul 19 12:38:25 managed-node2 platform-python[29239]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Jul 19 12:38:25 managed-node2 systemd[1]: Reloading. Jul 19 12:38:25 managed-node2 dnsmasq[29074]: listening on cni-podman1(#3): fe80::ec2d:d5ff:fe97:9a6b%cni-podman1 Jul 19 12:38:26 managed-node2 platform-python[29400]: ansible-systemd Invoked with name= scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None Jul 19 12:38:26 managed-node2 systemd[1]: Reloading. Jul 19 12:38:26 managed-node2 platform-python[29563]: ansible-systemd Invoked with name= scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None Jul 19 12:38:26 managed-node2 systemd[1]: Created slice system-podman\x2dkube.slice. -- Subject: Unit system-podman\x2dkube.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit system-podman\x2dkube.slice has finished starting up. -- -- The start-up result is done. Jul 19 12:38:26 managed-node2 systemd[1]: Starting A template for running K8s workloads via podman-kube-play... -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun starting up. 
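The podman-kube@ unit names above are the systemd path-escaped form of the kube YAML location. A minimal sketch of reproducing that mapping by hand (the systemctl call is illustrative; in this run the ansible-systemd tasks perform it):

  # Escape the kube file path into a podman-kube@ template instance name
  systemd-escape --path --template=podman-kube@.service \
      /etc/containers/ansible-kubernetes.d/httpd2.yml
  # -> podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service

  # Enable and start the escaped instance, as the tasks above do
  systemctl enable --now 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service'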
Jul 19 12:38:26 managed-node2 conmon[29076]: conmon c67c85c8cb2aae9cc679 : container 29087 exited with status 137 Jul 19 12:38:26 managed-node2 systemd[1]: libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has successfully entered the 'dead' state. Jul 19 12:38:26 managed-node2 systemd[1]: libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope: Consumed 32ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope completed and consumed the indicated resources. Jul 19 12:38:26 managed-node2 conmon[29098]: conmon add9038e274085c173b1 : container 29108 exited with status 137 Jul 19 12:38:26 managed-node2 systemd[1]: libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has successfully entered the 'dead' state. Jul 19 12:38:26 managed-node2 systemd[1]: libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope: Consumed 33ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope completed and consumed the indicated resources. Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c)" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=info msg="Using sqlite as database backend" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f)" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Setting 
custom database backend: \"sqlite\"" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=info msg="Using sqlite as database backend" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using graph driver overlay" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using graph root /var/lib/containers/storage" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using run root /run/containers/storage" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using tmp dir /run/libpod" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using transient store: false" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Cached value indicated that metacopy is being used" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Cached value indicated that native-diff is not being used" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Initializing event backend file" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: 
invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=info msg="Setting parallel job count to 7" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using graph driver overlay" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using graph root /var/lib/containers/storage" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using run root /run/containers/storage" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using tmp dir /run/libpod" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using transient store: false" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Cached value indicated that metacopy is being used" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Cached value indicated that native-diff is not being used" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Initializing event backend file" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jul 19 
12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:26-04:00" level=info msg="Setting parallel job count to 7" Jul 19 12:38:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay-ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45-merged.mount has successfully entered the 'dead' state. Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f)" Jul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time="2025-07-19T12:38:26-04:00" level=debug msg="Shutting down engines" Jul 19 12:38:26 managed-node2 systemd[1]: libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has successfully entered the 'dead' state. 
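The long run of "Configured OCI runtime ... initialization failed" messages is debug-level probing, not an error: podman checks every runtime listed in containers.conf and settles on the first usable binary, here /usr/bin/runc. A quick way to confirm the selected runtime (sketch; the template fields assume this podman generation's `podman info` schema):

  podman info --format '{{.Host.OCIRuntime.Name}} {{.Host.OCIRuntime.Path}}'
  # e.g.: runc /usr/bin/runc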
Jul 19 12:38:26 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered disabled state Jul 19 12:38:26 managed-node2 kernel: device vethf4165b4a left promiscuous mode Jul 19 12:38:26 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered disabled state Jul 19 12:38:26 managed-node2 systemd[1]: run-netns-netns\x2df67fee73\x2d2bbe\x2d5ce9\x2d31b0\x2d8129b0eb7f47.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-netns-netns\x2df67fee73\x2d2bbe\x2d5ce9\x2d31b0\x2d8129b0eb7f47.mount has successfully entered the 'dead' state. Jul 19 12:38:27 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c-userdata-shm.mount has successfully entered the 'dead' state. Jul 19 12:38:27 managed-node2 systemd[1]: var-lib-containers-storage-overlay-6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e-merged.mount has successfully entered the 'dead' state. Jul 19 12:38:27 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:27-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c)" Jul 19 12:38:27 managed-node2 /usr/bin/podman[29585]: time="2025-07-19T12:38:27-04:00" level=debug msg="Shutting down engines" Jul 19 12:38:27 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 19 12:38:27 managed-node2 systemd[1]: Stopping libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope. -- Subject: Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has begun shutting down. Jul 19 12:38:27 managed-node2 systemd[1]: libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has successfully entered the 'dead' state. Jul 19 12:38:27 managed-node2 systemd[1]: Stopped libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope. 
-- Subject: Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished shutting down. Jul 19 12:38:27 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice. -- Subject: Unit machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice has finished shutting down. Jul 19 12:38:27 managed-node2 systemd[1]: machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice: Consumed 198ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice completed and consumed the indicated resources. Jul 19 12:38:27 managed-node2 podman[29570]: Pods stopped: Jul 19 12:38:27 managed-node2 podman[29570]: f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816 Jul 19 12:38:27 managed-node2 podman[29570]: Pods removed: Jul 19 12:38:27 managed-node2 podman[29570]: f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816 Jul 19 12:38:27 managed-node2 podman[29570]: Secrets removed: Jul 19 12:38:27 managed-node2 podman[29570]: Volumes removed: Jul 19 12:38:27 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice. -- Subject: Unit machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice has finished starting up. -- -- The start-up result is done. Jul 19 12:38:27 managed-node2 systemd[1]: Started libcontainer container 4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a. -- Subject: Unit libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vetha38befe0: link is not ready Jul 19 12:38:27 managed-node2 systemd-udevd[29728]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. 
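The "Pods stopped / Pods removed" summary marks the handover from the ad-hoc podman_play pod to the systemd-managed one: starting the podman-kube@ unit tears the existing httpd2 pod down and recreates it. A rough equivalent of that replacement step (sketch; the exact flags in the shipped unit file may differ by podman version):

  podman play kube --replace /etc/containers/ansible-kubernetes.d/httpd2.yml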
Jul 19 12:38:27 managed-node2 systemd-udevd[29728]: Could not generate persistent MAC address for vetha38befe0: No such file or directory Jul 19 12:38:27 managed-node2 NetworkManager[661]: [1752943107.3392] manager: (vetha38befe0): new Veth device (/org/freedesktop/NetworkManager/Devices/5) Jul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered blocking state Jul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered disabled state Jul 19 12:38:27 managed-node2 kernel: device vetha38befe0 entered promiscuous mode Jul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered blocking state Jul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered forwarding state Jul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered disabled state Jul 19 12:38:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vetha38befe0: link becomes ready Jul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered blocking state Jul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered forwarding state Jul 19 12:38:27 managed-node2 NetworkManager[661]: [1752943107.3666] device (vetha38befe0): carrier: link connected Jul 19 12:38:27 managed-node2 NetworkManager[661]: [1752943107.3670] device (cni-podman1): carrier: link connected Jul 19 12:38:27 managed-node2 dnsmasq[29798]: listening on cni-podman1(#3): 10.89.0.1 Jul 19 12:38:27 managed-node2 dnsmasq[29798]: listening on cni-podman1(#3): fe80::ec2d:d5ff:fe97:9a6b%cni-podman1 Jul 19 12:38:27 managed-node2 dnsmasq[29802]: started, version 2.79 cachesize 150 Jul 19 12:38:27 managed-node2 dnsmasq[29802]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify Jul 19 12:38:27 managed-node2 dnsmasq[29802]: using local addresses only for domain dns.podman Jul 19 12:38:27 managed-node2 dnsmasq[29802]: reading /etc/resolv.conf Jul 19 12:38:27 managed-node2 dnsmasq[29802]: using local addresses only for domain dns.podman Jul 19 12:38:27 managed-node2 dnsmasq[29802]: using nameserver 10.29.169.13#53 Jul 19 12:38:27 managed-node2 dnsmasq[29802]: using nameserver 10.29.170.12#53 Jul 19 12:38:27 managed-node2 dnsmasq[29802]: using nameserver 10.2.32.1#53 Jul 19 12:38:27 managed-node2 dnsmasq[29802]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Jul 19 12:38:27 managed-node2 systemd[1]: Started libcontainer container 64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3. -- Subject: Unit libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:27 managed-node2 systemd[1]: Started libcontainer container d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425. -- Subject: Unit libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope has finished starting up. -- -- The start-up result is done. 
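The dnsmasq instances above belong to the CNI dnsname plugin: it keeps an addnhosts file per network and answers for the dns.podman domain on the bridge address (10.89.0.1 here). A sketch for peeking at that state; the hostname queried is an assumption, not taken from this log:

  cat /run/containers/cni/dnsname/podman-default-kube-network/addnhosts
  dig +short @10.89.0.1 httpd2.dns.podman   # hypothetical pod hostname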
Jul 19 12:38:27 managed-node2 podman[29570]: Pod: Jul 19 12:38:27 managed-node2 podman[29570]: 63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7 Jul 19 12:38:27 managed-node2 podman[29570]: Container: Jul 19 12:38:27 managed-node2 podman[29570]: d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425 Jul 19 12:38:27 managed-node2 systemd[1]: Started A template for running K8s workloads via podman-kube-play. -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished starting up. -- -- The start-up result is done. Jul 19 12:38:28 managed-node2 platform-python[29967]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:38:29 managed-node2 platform-python[30100]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:30 managed-node2 platform-python[30224]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:38:31 managed-node2 platform-python[30347]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:38:32 managed-node2 platform-python[30636]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:38:32 managed-node2 platform-python[30759]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:38:33 managed-node2 platform-python[30882]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 19 12:38:33 managed-node2 platform-python[30981]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 
src=/root/.ansible/tmp/ansible-tmp-1752943113.1342852-16787-124035634200669/source _original_basename=tmpprx4cnlk follow=False checksum=6f620a32a353317135005413ecc9cbab44a8759d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Jul 19 12:38:34 managed-node2 platform-python[31106]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:38:34 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice. -- Subject: Unit machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice has finished starting up. -- -- The start-up result is done. Jul 19 12:38:34 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vethca04a2e2: link is not ready Jul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered blocking state Jul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered disabled state Jul 19 12:38:34 managed-node2 kernel: device vethca04a2e2 entered promiscuous mode Jul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered blocking state Jul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered forwarding state Jul 19 12:38:34 managed-node2 NetworkManager[661]: [1752943114.3363] manager: (vethca04a2e2): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Jul 19 12:38:34 managed-node2 systemd-udevd[31156]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. Jul 19 12:38:34 managed-node2 systemd-udevd[31156]: Could not generate persistent MAC address for vethca04a2e2: No such file or directory Jul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered disabled state Jul 19 12:38:34 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready Jul 19 12:38:34 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready Jul 19 12:38:34 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethca04a2e2: link becomes ready Jul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered blocking state Jul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered forwarding state Jul 19 12:38:34 managed-node2 NetworkManager[661]: [1752943114.3654] device (vethca04a2e2): carrier: link connected Jul 19 12:38:34 managed-node2 dnsmasq[29802]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 2 addresses Jul 19 12:38:34 managed-node2 systemd[1]: Started libpod-conmon-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope. 
-- Subject: Unit libpod-conmon-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-conmon-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:34 managed-node2 systemd[1]: Started libcontainer container 9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91. -- Subject: Unit libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:34 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jul 19 12:38:34 managed-node2 systemd[1]: Started libpod-conmon-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope. -- Subject: Unit libpod-conmon-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-conmon-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:34 managed-node2 systemd[1]: Started libcontainer container 798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943. -- Subject: Unit libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:35 managed-node2 platform-python[31387]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Jul 19 12:38:35 managed-node2 systemd[1]: Reloading. Jul 19 12:38:35 managed-node2 platform-python[31548]: ansible-systemd Invoked with name= scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None Jul 19 12:38:35 managed-node2 systemd[1]: Reloading. Jul 19 12:38:36 managed-node2 platform-python[31703]: ansible-systemd Invoked with name= scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None Jul 19 12:38:36 managed-node2 systemd[1]: Starting A template for running K8s workloads via podman-kube-play... -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun starting up. Jul 19 12:38:36 managed-node2 systemd[1]: libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has successfully entered the 'dead' state. Jul 19 12:38:36 managed-node2 systemd[1]: libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope: Consumed 34ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope completed and consumed the indicated resources. Jul 19 12:38:36 managed-node2 systemd[1]: libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has successfully entered the 'dead' state. Jul 19 12:38:36 managed-node2 systemd[1]: libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope: Consumed 36ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope completed and consumed the indicated resources. Jul 19 12:38:36 managed-node2 dnsmasq[29802]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Jul 19 12:38:36 managed-node2 systemd[1]: var-lib-containers-storage-overlay-0e27a2de6423f234f5c5cc21592f99c374ae3f65ee2ffe512e2ea9260072c30b-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-0e27a2de6423f234f5c5cc21592f99c374ae3f65ee2ffe512e2ea9260072c30b-merged.mount has successfully entered the 'dead' state. Jul 19 12:38:36 managed-node2 systemd[1]: libpod-conmon-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-conmon-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has successfully entered the 'dead' state. Jul 19 12:38:36 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered disabled state Jul 19 12:38:36 managed-node2 kernel: device vethca04a2e2 left promiscuous mode Jul 19 12:38:36 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered disabled state Jul 19 12:38:36 managed-node2 systemd[1]: run-netns-netns\x2dbc35cf78\x2d8b29\x2d812d\x2d8688\x2d6b3a472533c6.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-netns-netns\x2dbc35cf78\x2d8b29\x2d812d\x2d8688\x2d6b3a472533c6.mount has successfully entered the 'dead' state. Jul 19 12:38:36 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91-userdata-shm.mount has successfully entered the 'dead' state. 
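The httpd3 sequence repeats the httpd2 pattern: copy the kube YAML into /etc/containers/ansible-kubernetes.d, run podman_play, then hand the pod to systemd, which is why the freshly created pod is immediately stopped and replaced. A condensed shell equivalent of the module calls logged above (sketch; unit-name escaping as shown earlier):

  cp httpd3.yml /etc/containers/ansible-kubernetes.d/httpd3.yml
  podman play kube --start=true /etc/containers/ansible-kubernetes.d/httpd3.yml
  systemctl daemon-reload
  systemctl enable --now \
      'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service'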
Jul 19 12:38:36 managed-node2 systemd[1]: var-lib-containers-storage-overlay-50218d54ec26584adc0c1ba212de5d0b7c4329564918e9762d222c23ddef0ca1-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-50218d54ec26584adc0c1ba212de5d0b7c4329564918e9762d222c23ddef0ca1-merged.mount has successfully entered the 'dead' state. Jul 19 12:38:36 managed-node2 systemd[1]: libpod-conmon-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-conmon-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has successfully entered the 'dead' state. Jul 19 12:38:36 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice. -- Subject: Unit machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice has finished shutting down. Jul 19 12:38:36 managed-node2 systemd[1]: machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice: Consumed 192ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice completed and consumed the indicated resources. Jul 19 12:38:36 managed-node2 podman[31710]: Pods stopped: Jul 19 12:38:36 managed-node2 podman[31710]: 7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a Jul 19 12:38:36 managed-node2 podman[31710]: Pods removed: Jul 19 12:38:36 managed-node2 podman[31710]: 7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a Jul 19 12:38:36 managed-node2 podman[31710]: Secrets removed: Jul 19 12:38:36 managed-node2 podman[31710]: Volumes removed: Jul 19 12:38:37 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice. -- Subject: Unit machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice has finished starting up. -- -- The start-up result is done. Jul 19 12:38:37 managed-node2 systemd[1]: Started libcontainer container c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c. -- Subject: Unit libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:37 managed-node2 NetworkManager[661]: [1752943117.2271] manager: (vetha6d4d23e): new Veth device (/org/freedesktop/NetworkManager/Devices/7) Jul 19 12:38:37 managed-node2 systemd-udevd[31876]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. 
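The "exited with status 137" messages seen during these teardowns (e.g. for the httpd2 containers earlier) are expected: 137 is 128 + 9, meaning the container was finished off with SIGKILL during pod replacement rather than exiting cleanly on SIGTERM.

  # Decoding the exit status from the conmon messages
  echo $((128 + 9))   # -> 137
  kill -l 137         # -> KILL (bash maps exit statuses >128 back to signals)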
Jul 19 12:38:37 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vetha6d4d23e: link is not ready Jul 19 12:38:37 managed-node2 systemd-udevd[31876]: Could not generate persistent MAC address for vetha6d4d23e: No such file or directory Jul 19 12:38:37 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered blocking state Jul 19 12:38:37 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered disabled state Jul 19 12:38:37 managed-node2 kernel: device vetha6d4d23e entered promiscuous mode Jul 19 12:38:37 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered blocking state Jul 19 12:38:37 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered forwarding state Jul 19 12:38:37 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vetha6d4d23e: link becomes ready Jul 19 12:38:37 managed-node2 NetworkManager[661]: [1752943117.2430] device (vetha6d4d23e): carrier: link connected Jul 19 12:38:37 managed-node2 dnsmasq[29802]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 2 addresses Jul 19 12:38:37 managed-node2 systemd[1]: Started libcontainer container 8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6. -- Subject: Unit libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:37 managed-node2 systemd[1]: Started libcontainer container bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c. -- Subject: Unit libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope has finished starting up. -- -- The start-up result is done. Jul 19 12:38:37 managed-node2 podman[31710]: Pod: Jul 19 12:38:37 managed-node2 podman[31710]: 78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf Jul 19 12:38:37 managed-node2 podman[31710]: Container: Jul 19 12:38:37 managed-node2 podman[31710]: bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c Jul 19 12:38:37 managed-node2 systemd[1]: Started A template for running K8s workloads via podman-kube-play. -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished starting up. -- -- The start-up result is done. Jul 19 12:38:38 managed-node2 sudo[32108]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dlblsunulyjazdrjwbxpqfeiydnfqnsr ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943118.083963-17014-206060369573326/AnsiballZ_command.py' Jul 19 12:38:38 managed-node2 sudo[32108]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:38:38 managed-node2 platform-python[32111]: ansible-command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:38 managed-node2 systemd[25528]: Started podman-32120.scope. 
-- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:38:38 managed-node2 sudo[32108]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:38:38 managed-node2 platform-python[32258]: ansible-command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:39 managed-node2 platform-python[32389]: ansible-command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:39 managed-node2 sudo[32520]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-eeofqvmjjevxeutcqzjhxoeuxycorapq ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943119.4551659-17079-9632647473971/AnsiballZ_command.py' Jul 19 12:38:39 managed-node2 sudo[32520]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:38:39 managed-node2 platform-python[32523]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:39 managed-node2 sudo[32520]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:38:40 managed-node2 platform-python[32649]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:40 managed-node2 platform-python[32775]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:41 managed-node2 platform-python[32901]: ansible-uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:38:41 managed-node2 platform-python[33025]: ansible-uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None 
delimiter=None directory_mode=None Jul 19 12:38:41 managed-node2 rsyslogd[1019]: imjournal: journal files changed, reloading... [v8.2102.0-15.el8 try https://www.rsyslog.com/e/0 ] Jul 19 12:38:41 managed-node2 platform-python[33150]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr_s72m6vdk_podman/httpd1-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:42 managed-node2 platform-python[33274]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr_s72m6vdk_podman/httpd2-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:42 managed-node2 platform-python[33398]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr_s72m6vdk_podman/httpd3-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:45 managed-node2 platform-python[33647]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:38:46 managed-node2 platform-python[33775]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:38:49 managed-node2 platform-python[33900]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:38:52 managed-node2 platform-python[34023]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 19 12:38:52 managed-node2 platform-python[34150]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 19 12:38:53 managed-node2 platform-python[34277]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 19 12:38:55 managed-node2 platform-python[34400]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 
conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:38:58 managed-node2 platform-python[34523]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:39:01 managed-node2 platform-python[34646]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:39:03 managed-node2 platform-python[34769]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 19 12:39:05 managed-node2 platform-python[34930]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 19 12:39:06 managed-node2 platform-python[35053]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 19 12:39:10 managed-node2 platform-python[35176]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jul 19 12:39:11 managed-node2 platform-python[35300]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:11 managed-node2 platform-python[35425]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:12 managed-node2 platform-python[35549]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:13 managed-node2 platform-python[35673]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:14 managed-node2 platform-python[35797]: ansible-command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jul 19 12:39:14 managed-node2 platform-python[35920]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None 
seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:15 managed-node2 platform-python[36043]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:15 managed-node2 sudo[36166]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wbomjbspvcxjkqucdsequagxlcclxnab ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943155.3657928-18727-255969634455656/AnsiballZ_podman_image.py' Jul 19 12:39:15 managed-node2 sudo[36166]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:39:15 managed-node2 systemd[25528]: Started podman-36171.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:39:15 managed-node2 systemd[25528]: Started podman-36179.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:39:15 managed-node2 systemd[25528]: Started podman-36187.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:39:15 managed-node2 systemd[25528]: Started podman-36195.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:39:15 managed-node2 systemd[25528]: Started podman-36203.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:39:16 managed-node2 systemd[25528]: Started podman-36211.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
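Up to this point the entries show the role preparing managed-node2 for rootless podman: firewalld is installed and started, ports 15001-15003/tcp are opened, the same range is labeled http_port_t via the SELinux tooling, subuid/subgid delegation for podman_basic_user is checked with getsubids, and lingering is enabled so the user's systemd instance keeps running without an active login. A minimal sketch of the lingering step as a playbook task, reconstructed from the logged command parameters (the task name is ours; the creates guard is what makes the logged call idempotent):

    - name: Enable lingering for the rootless user
      command: loginctl enable-linger podman_basic_user
      args:
        creates: /var/lib/systemd/linger/podman_basic_user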
Jul 19 12:39:16 managed-node2 sudo[36166]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:39:16 managed-node2 platform-python[36340]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:17 managed-node2 platform-python[36465]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:17 managed-node2 platform-python[36588]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 19 12:39:17 managed-node2 platform-python[36652]: ansible-file Invoked with owner=podman_basic_user group=3001 mode=0644 dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _original_basename=tmpvcvwgcl1 recurse=False state=file path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:18 managed-node2 sudo[36775]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mwgfktigaxpvesxpytdoaduxtkxpnkwn ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943158.1234493-18828-35756619876473/AnsiballZ_podman_play.py' Jul 19 12:39:18 managed-node2 sudo[36775]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:39:18 managed-node2 platform-python[36778]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:39:18 managed-node2 systemd[25528]: Started podman-36786.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
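The kube YAML for httpd1 is copied into ~podman_basic_user/.config/containers/ansible-kubernetes.d/ (mode 0644) and containers.podman.podman_play is then invoked as that user. A reconstruction from the logged invocation (the task name and become wiring are ours; the sudo line above shows the role exporting XDG_RUNTIME_DIR=/run/user/3001 so the rootless podman finds its runtime directory):

    - name: Play the httpd1 kube file as podman_basic_user
      containers.podman.podman_play:
        kube_file: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
        state: started
        debug: true
        log_level: debug
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001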
Jul 19 12:39:18 managed-node2 platform-python[36778]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jul 19 12:39:18 managed-node2 platform-python[36778]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Jul 19 12:39:18 managed-node2 platform-python[36778]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-19T12:39:18-04:00" level=info msg="/bin/podman filtering at log level debug" time="2025-07-19T12:39:18-04:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-07-19T12:39:18-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-07-19T12:39:18-04:00" level=info msg="Using sqlite as database backend" time="2025-07-19T12:39:18-04:00" level=debug msg="Using graph driver overlay" time="2025-07-19T12:39:18-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2025-07-19T12:39:18-04:00" level=debug msg="Using run root /run/user/3001/containers" time="2025-07-19T12:39:18-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2025-07-19T12:39:18-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2025-07-19T12:39:18-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2025-07-19T12:39:18-04:00" level=debug msg="Using transient store: false" time="2025-07-19T12:39:18-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-07-19T12:39:18-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-19T12:39:18-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-19T12:39:18-04:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2025-07-19T12:39:18-04:00" level=debug msg="Cached value indicated that native-diff is usable" time="2025-07-19T12:39:18-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2025-07-19T12:39:18-04:00" level=debug msg="Initializing event backend file" time="2025-07-19T12:39:18-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-07-19T12:39:18-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-07-19T12:39:18-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-07-19T12:39:18-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-07-19T12:39:18-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-07-19T12:39:18-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-07-19T12:39:18-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI 
runtime crun-wasm: invalid argument" time="2025-07-19T12:39:18-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" time="2025-07-19T12:39:18-04:00" level=info msg="Setting parallel job count to 7" time="2025-07-19T12:39:18-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-19 12:38:13.521272 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-07-19T12:39:18-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-07-19T12:39:18-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-19T12:39:18-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:39:18-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-07-19T12:39:18-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\"" time="2025-07-19T12:39:18-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-19T12:39:18-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63)" time="2025-07-19T12:39:18-04:00" level=debug msg="exporting opaque data as blob \"sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\"" time="2025-07-19T12:39:18-04:00" level=debug msg="Pod using bridge network mode" time="2025-07-19T12:39:18-04:00" level=debug msg="Got pod cgroup as /libpod_parent/1846ecb89dbdb057faef33ff14bed3ee782f5fffa65b2fd38248f39e0fe82c96" Error: adding pod to state: name "httpd1" is in use: pod already exists time="2025-07-19T12:39:18-04:00" level=debug msg="Shutting down engines" Jul 19 12:39:18 managed-node2 platform-python[36778]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125 Jul 19 12:39:18 managed-node2 sudo[36775]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:39:19 managed-node2 platform-python[36940]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 19 12:39:20 managed-node2 platform-python[37064]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:21 managed-node2 platform-python[37189]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:22 managed-node2 platform-python[37313]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None 
selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:22 managed-node2 platform-python[37436]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:23 managed-node2 platform-python[37727]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:24 managed-node2 platform-python[37852]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:24 managed-node2 platform-python[37975]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 19 12:39:24 managed-node2 platform-python[38039]: ansible-file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd2.yml _original_basename=tmpoct5ap5y recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd2.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:25 managed-node2 platform-python[38162]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:39:25 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice. -- Subject: Unit machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice has finished starting up. -- -- The start-up result is done. 
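The rootless httpd1 play above exited with rc 125 because a pod named httpd1 already exists, and the rootful httpd2 play that follows fails the same way; this looks like a re-run against pods deployed earlier in the test, with the role treating the state as already converged. If replacement were actually wanted, the module's recreate option (logged above as recreate=None) is the lever; a hedged sketch, not what this test does:

    - name: Re-play a kube file, replacing the existing pod
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/httpd2.yml
        state: started
        recreate: true   # tear down and recreate instead of failing with "pod already exists"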
Jul 19 12:39:25 managed-node2 platform-python[38162]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 19 12:39:25 managed-node2 platform-python[38162]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Jul 19 12:39:25 managed-node2 platform-python[38162]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-19T12:39:25-04:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2025-07-19T12:39:25-04:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-07-19T12:39:25-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-07-19T12:39:25-04:00" level=info msg="Using sqlite as database backend" time="2025-07-19T12:39:25-04:00" level=debug msg="Using graph driver overlay" time="2025-07-19T12:39:25-04:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2025-07-19T12:39:25-04:00" level=debug msg="Using run root /run/containers/storage" time="2025-07-19T12:39:25-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2025-07-19T12:39:25-04:00" level=debug msg="Using tmp dir /run/libpod" time="2025-07-19T12:39:25-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2025-07-19T12:39:25-04:00" level=debug msg="Using transient store: false" time="2025-07-19T12:39:25-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-07-19T12:39:25-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-19T12:39:25-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-19T12:39:25-04:00" level=debug msg="Cached value indicated that metacopy is being used" time="2025-07-19T12:39:25-04:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2025-07-19T12:39:25-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2025-07-19T12:39:25-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2025-07-19T12:39:25-04:00" level=debug msg="Initializing event backend file" time="2025-07-19T12:39:25-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-07-19T12:39:25-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-07-19T12:39:25-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-07-19T12:39:25-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-07-19T12:39:25-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-07-19T12:39:25-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-07-19T12:39:25-04:00" level=debug msg="Configured OCI runtime crun-wasm 
initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-07-19T12:39:25-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" time="2025-07-19T12:39:25-04:00" level=info msg="Setting parallel job count to 7" time="2025-07-19T12:39:25-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-19 12:35:55.640649556 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-07-19T12:39:25-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-07-19T12:39:25-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-19T12:39:25-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-19T12:39:25-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-07-19T12:39:25-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"" time="2025-07-19T12:39:25-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-07-19T12:39:25-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a)" time="2025-07-19T12:39:25-04:00" level=debug msg="exporting opaque data as blob \"sha256:fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"" time="2025-07-19T12:39:25-04:00" level=debug msg="Pod using bridge network mode" time="2025-07-19T12:39:25-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice for parent machine.slice and name libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19" time="2025-07-19T12:39:25-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice" time="2025-07-19T12:39:25-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice" Error: adding pod to state: name "httpd2" is in use: pod already exists time="2025-07-19T12:39:25-04:00" level=debug msg="Shutting down engines" Jul 19 12:39:25 managed-node2 platform-python[38162]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125 Jul 19 12:39:26 managed-node2 platform-python[38323]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:27 managed-node2 platform-python[38448]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:28 managed-node2 platform-python[38572]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd3-create 
state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:29 managed-node2 platform-python[38695]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:30 managed-node2 platform-python[38984]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:31 managed-node2 platform-python[39109]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:31 managed-node2 platform-python[39232]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Jul 19 12:39:31 managed-node2 platform-python[39296]: ansible-file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd3.yml _original_basename=tmp8akhw61j recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd3.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:32 managed-node2 platform-python[39419]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:39:32 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_cb3b0a5bd9971330489767c2081b22911aea01140e5cd90be87bd649d4122b98.slice. 
-- Subject: Unit machine-libpod_pod_cb3b0a5bd9971330489767c2081b22911aea01140e5cd90be87bd649d4122b98.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_cb3b0a5bd9971330489767c2081b22911aea01140e5cd90be87bd649d4122b98.slice has finished starting up. -- -- The start-up result is done. Jul 19 12:39:33 managed-node2 sudo[39581]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hrgfwbfwhnrhozyrljgrwuftlafbcvgj ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943172.9515133-19598-262459141060618/AnsiballZ_command.py' Jul 19 12:39:33 managed-node2 sudo[39581]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:39:33 managed-node2 platform-python[39584]: ansible-command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:33 managed-node2 systemd[25528]: Started podman-39593.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:39:33 managed-node2 sudo[39581]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:39:33 managed-node2 platform-python[39723]: ansible-command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:34 managed-node2 platform-python[39854]: ansible-command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:34 managed-node2 sudo[39985]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kapxhijpsmqpbluyhztflsgdqehfqzsf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943174.2658803-19637-151873258581255/AnsiballZ_command.py' Jul 19 12:39:34 managed-node2 sudo[39985]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:39:34 managed-node2 platform-python[39988]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:34 managed-node2 sudo[39985]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:39:34 managed-node2 platform-python[40114]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:35 managed-node2 platform-python[40240]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:35 managed-node2 platform-python[40366]: ansible-uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET 
follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:36 managed-node2 platform-python[40490]: ansible-uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:36 managed-node2 platform-python[40614]: ansible-uri Invoked with url=http://localhost:15003/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:39 managed-node2 platform-python[40863]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:40 managed-node2 platform-python[40992]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:43 managed-node2 platform-python[41117]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jul 19 12:39:44 managed-node2 platform-python[41241]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:44 managed-node2 platform-python[41366]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:44 managed-node2 platform-python[41490]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:45 managed-node2 platform-python[41614]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:46 managed-node2 
platform-python[41738]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:46 managed-node2 sudo[41863]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bsbqhhxxcupnkafozyypbjqjdnfplilf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943186.6742442-20261-89535707061551/AnsiballZ_systemd.py' Jul 19 12:39:46 managed-node2 sudo[41863]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:39:47 managed-node2 platform-python[41866]: ansible-systemd Invoked with name= scope=user state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 19 12:39:47 managed-node2 systemd[25528]: Reloading. Jul 19 12:39:47 managed-node2 systemd[25528]: Stopping A template for running K8s workloads via podman-kube-play... -- Subject: Unit UNIT has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun shutting down. Jul 19 12:39:47 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered disabled state Jul 19 12:39:47 managed-node2 kernel: device veth24653eaf left promiscuous mode Jul 19 12:39:47 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered disabled state Jul 19 12:39:47 managed-node2 podman[41882]: Pods stopped: Jul 19 12:39:47 managed-node2 podman[41882]: 2d1d21944d4ca561d0a237f029182068a925bb49513d0d920c7ad527b530a87a Jul 19 12:39:47 managed-node2 podman[41882]: Pods removed: Jul 19 12:39:47 managed-node2 podman[41882]: 2d1d21944d4ca561d0a237f029182068a925bb49513d0d920c7ad527b530a87a Jul 19 12:39:47 managed-node2 podman[41882]: Secrets removed: Jul 19 12:39:47 managed-node2 podman[41882]: Volumes removed: Jul 19 12:39:47 managed-node2 systemd[25528]: Stopped A template for running K8s workloads via podman-kube-play. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. 
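After the pods answer the uri probes on ports 15001-15003, teardown begins: the user systemd manager (systemd[25528]) stops the podman-kube template unit for httpd1, and podman[41882] confirms the pod 2d1d21944d4c... stopped and removed. The ansible-systemd entry logs scope=user with an empty name=, so the exact unit name is not recorded here; a sketch of such a user-scoped stop, with the unit name left as a hypothetical variable for that reason:

    - name: Stop a user-scoped podman-kube unit
      systemd:
        name: "{{ __kube_unit }}"   # hypothetical; the log does not record the instance name
        scope: user
        state: stopped
        enabled: false
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001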
Jul 19 12:39:47 managed-node2 sudo[41863]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:39:48 managed-node2 platform-python[42156]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:48 managed-node2 sudo[42281]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rkhrtubvxxvbakhpbdfuonkadzawohqm ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943188.1900835-20342-278852314562176/AnsiballZ_podman_play.py' Jul 19 12:39:48 managed-node2 sudo[42281]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jul 19 12:39:48 managed-node2 systemd[25528]: Started podman-42292.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
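With the unit gone, the pod definition itself is removed: podman_play is invoked with state=absent, which (as the next entries show) drives podman kube play --down, and the kube YAML is then deleted with the file module. Reconstructed from the logged parameters (task names ours):

    - name: Tear down the httpd1 pod (rootless)
      containers.podman.podman_play:
        kube_file: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
        state: absent
      become: true
      become_user: podman_basic_user

    - name: Remove the httpd1 kube file
      file:
        path: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
        state: absent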
Jul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman kube play --down /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped: Pods removed: Secrets removed: Volumes removed: Jul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: Jul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jul 19 12:39:48 managed-node2 sudo[42281]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:39:48 managed-node2 platform-python[42421]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:50 managed-node2 platform-python[42544]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 19 12:39:50 managed-node2 platform-python[42668]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:51 managed-node2 platform-python[42793]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:52 managed-node2 platform-python[42917]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 19 12:39:52 managed-node2 systemd[1]: Reloading. Jul 19 12:39:52 managed-node2 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play... -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun shutting down. Jul 19 12:39:52 managed-node2 systemd[1]: libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope has successfully entered the 'dead' state. Jul 19 12:39:52 managed-node2 systemd[1]: libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope: Consumed 33ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope completed and consumed the indicated resources. Jul 19 12:39:52 managed-node2 systemd[1]: libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope has successfully entered the 'dead' state. Jul 19 12:39:52 managed-node2 systemd[1]: libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope: Consumed 33ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope completed and consumed the indicated resources. Jul 19 12:39:52 managed-node2 dnsmasq[29802]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Jul 19 12:39:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay-1ac1165dcb590ce00bffba4600c63f5cfb3b70afb8f380b4edeace6635fcdfe3-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-1ac1165dcb590ce00bffba4600c63f5cfb3b70afb8f380b4edeace6635fcdfe3-merged.mount has successfully entered the 'dead' state. Jul 19 12:39:52 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered disabled state Jul 19 12:39:52 managed-node2 kernel: device vetha38befe0 left promiscuous mode Jul 19 12:39:52 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered disabled state Jul 19 12:39:52 managed-node2 systemd[1]: run-netns-netns\x2d85ef15c4\x2d2df7\x2d918e\x2d907f\x2dc88b265faa98.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-netns-netns\x2d85ef15c4\x2d2df7\x2d918e\x2d907f\x2dc88b265faa98.mount has successfully entered the 'dead' state. Jul 19 12:39:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3-userdata-shm.mount has successfully entered the 'dead' state. Jul 19 12:39:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay-38daa2f903ec0433792b188cb05d307d74de74874667479598255b129c8e533b-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-38daa2f903ec0433792b188cb05d307d74de74874667479598255b129c8e533b-merged.mount has successfully entered the 'dead' state. Jul 19 12:39:52 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice. -- Subject: Unit machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice has finished shutting down. 
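The rootful pods run under the podman-kube@.service template, whose instance name is the systemd-escaped path of the kube file; that is why the journal shows podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, with the hyphen in ansible-kubernetes.d escaped to \x2d. The name can be reproduced with systemd-escape; a sketch (task name ours):

    - name: Derive the podman-kube unit name for a kube file path
      command: systemd-escape --template=podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml
      register: kube_unit
      changed_when: false
      # kube_unit.stdout -> podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service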
Jul 19 12:39:52 managed-node2 systemd[1]: machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice: Consumed 66ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice completed and consumed the indicated resources. Jul 19 12:39:52 managed-node2 podman[42953]: Pods stopped: Jul 19 12:39:52 managed-node2 podman[42953]: 63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7 Jul 19 12:39:52 managed-node2 podman[42953]: Pods removed: Jul 19 12:39:52 managed-node2 podman[42953]: 63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7 Jul 19 12:39:52 managed-node2 podman[42953]: Secrets removed: Jul 19 12:39:52 managed-node2 podman[42953]: Volumes removed: Jul 19 12:39:52 managed-node2 systemd[1]: libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope has successfully entered the 'dead' state. Jul 19 12:39:52 managed-node2 systemd[1]: libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope: Consumed 34ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope completed and consumed the indicated resources. Jul 19 12:39:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a-userdata-shm.mount has successfully entered the 'dead' state. Jul 19 12:39:53 managed-node2 dnsmasq[29802]: exiting on receipt of SIGTERM Jul 19 12:39:53 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has successfully entered the 'dead' state. Jul 19 12:39:53 managed-node2 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play. -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished shutting down. Jul 19 12:39:53 managed-node2 platform-python[43230]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay-946e50296936b22c6a0cd6493841882848a8040824e6c32355272e3fbcd82469-merged.mount: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-946e50296936b22c6a0cd6493841882848a8040824e6c32355272e3fbcd82469-merged.mount has successfully entered the 'dead' state. Jul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman kube play --down /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped: Pods removed: Secrets removed: Volumes removed: Jul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: Jul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jul 19 12:39:54 managed-node2 platform-python[43492]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:39:55 managed-node2 platform-python[43615]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:56 managed-node2 platform-python[43740]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:39:57 managed-node2 platform-python[43864]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 19 12:39:57 managed-node2 systemd[1]: Reloading. Jul 19 12:39:57 managed-node2 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play... -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun shutting down. Jul 19 12:39:57 managed-node2 systemd[1]: libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope has successfully entered the 'dead' state. Jul 19 12:39:57 managed-node2 systemd[1]: libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope: Consumed 33ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope completed and consumed the indicated resources. Jul 19 12:39:57 managed-node2 systemd[1]: libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope has successfully entered the 'dead' state. Jul 19 12:39:57 managed-node2 systemd[1]: libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope: Consumed 33ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope completed and consumed the indicated resources. Jul 19 12:39:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay-e4d65582a94e2bdc8a1eaad3e5573271c39b373e604383029d5c678d2ac244a1-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-e4d65582a94e2bdc8a1eaad3e5573271c39b373e604383029d5c678d2ac244a1-merged.mount has successfully entered the 'dead' state. Jul 19 12:39:57 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered disabled state Jul 19 12:39:57 managed-node2 kernel: device vetha6d4d23e left promiscuous mode Jul 19 12:39:57 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered disabled state Jul 19 12:39:57 managed-node2 systemd[1]: run-netns-netns\x2d4c7240ed\x2da995\x2deb80\x2d20f7\x2d420676ba3f43.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-netns-netns\x2d4c7240ed\x2da995\x2deb80\x2d20f7\x2d420676ba3f43.mount has successfully entered the 'dead' state. Jul 19 12:39:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6-userdata-shm.mount has successfully entered the 'dead' state. Jul 19 12:39:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay-9dcce2629a28829a7d47e45b786a91b8326ac6500c27209769c1539d9f082e74-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-9dcce2629a28829a7d47e45b786a91b8326ac6500c27209769c1539d9f082e74-merged.mount has successfully entered the 'dead' state. Jul 19 12:39:57 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice. 
-- Subject: Unit machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice has finished shutting down. Jul 19 12:39:57 managed-node2 systemd[1]: machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice: Consumed 67ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice completed and consumed the indicated resources. Jul 19 12:39:58 managed-node2 podman[43900]: Pods stopped: Jul 19 12:39:58 managed-node2 podman[43900]: 78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf Jul 19 12:39:58 managed-node2 podman[43900]: Pods removed: Jul 19 12:39:58 managed-node2 podman[43900]: 78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf Jul 19 12:39:58 managed-node2 podman[43900]: Secrets removed: Jul 19 12:39:58 managed-node2 podman[43900]: Volumes removed: Jul 19 12:39:58 managed-node2 systemd[1]: libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope has successfully entered the 'dead' state. Jul 19 12:39:58 managed-node2 systemd[1]: libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope: Consumed 33ms CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope completed and consumed the indicated resources. Jul 19 12:39:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c-userdata-shm.mount has successfully entered the 'dead' state. Jul 19 12:39:58 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has successfully entered the 'dead' state. Jul 19 12:39:58 managed-node2 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play. -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished shutting down. 
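httpd3 is stopped the same way, through the system-scope systemd call logged above (name= is blank in that entry, but the journal names the unit). A reconstruction; note the single quotes, since in a YAML double-quoted string \x2d would be decoded to a plain hyphen:

    - name: Stop and disable the httpd3 kube unit
      systemd:
        name: 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service'
        scope: system
        state: stopped
        enabled: false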
Jul 19 12:39:58 managed-node2 platform-python[44169]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:39:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay-c944fbd43e1673bcb0e2412bde6d753cffca05c01ef505aa29441df09b37e4f0-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-c944fbd43e1673bcb0e2412bde6d753cffca05c01ef505aa29441df09b37e4f0-merged.mount has successfully entered the 'dead' state. Jul 19 12:39:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 19 12:39:58 managed-node2 platform-python[44294]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 19 12:39:58 managed-node2 platform-python[44294]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/httpd3.yml Jul 19 12:39:59 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
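The `podman_play` call logged above maps to a cleanup task like the following; a reconstruction from the logged module arguments, not a copy of the test playbook. The subsequent `ansible-file` entry removes the kube YAML itself:

- name: Tear down the httpd3 kube play definition (sketch from the logged arguments)
  containers.podman.podman_play:
    kube_file: /etc/containers/ansible-kubernetes.d/httpd3.yml
    state: absent

- name: Delete the kube YAML file
  file:
    path: /etc/containers/ansible-kubernetes.d/httpd3.yml
    state: absent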
Jul 19 12:39:59 managed-node2 platform-python[44430]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:40:00 managed-node2 platform-python[44553]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None Jul 19 12:40:01 managed-node2 platform-python[44677]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:01 managed-node2 sudo[44802]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zjfnlsrhavffrrytzlezjdprwiglzjsy ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943201.3476377-20989-80415109164671/AnsiballZ_podman_container_info.py' Jul 19 12:40:01 managed-node2 sudo[44802]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:40:01 managed-node2 platform-python[44805]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None Jul 19 12:40:01 managed-node2 systemd[25528]: Started podman-44807.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:40:01 managed-node2 sudo[44802]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:40:02 managed-node2 sudo[44936]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kqzzrdenthgaimpshaehnjtpvgbskyzt ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943201.9272537-21012-43022464511697/AnsiballZ_command.py' Jul 19 12:40:02 managed-node2 sudo[44936]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:40:02 managed-node2 platform-python[44939]: ansible-command Invoked with _raw_params=podman network ls -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:02 managed-node2 systemd[25528]: Started podman-44941.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
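The sudo plus `XDG_RUNTIME_DIR` pattern in these entries is how the tests run rootless podman commands: the task becomes `podman_basic_user` and exports the user's runtime directory so podman can reach the per-user socket. A rough equivalent (UID 3001 is taken from the log; the real role derives the path from `getent` output):

- name: List rootless podman networks as the unprivileged user (illustrative)
  command: podman network ls -q
  become: true
  become_user: podman_basic_user
  environment:
    # Runtime dir for UID 3001, as seen in the sudo command lines above
    XDG_RUNTIME_DIR: /run/user/3001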
Jul 19 12:40:02 managed-node2 sudo[44936]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:40:02 managed-node2 sudo[45096]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wfpjnxfzctwnxksvtryonkmxvjvkhduq ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943202.41182-21042-156572471435781/AnsiballZ_command.py' Jul 19 12:40:02 managed-node2 sudo[45096]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:40:02 managed-node2 platform-python[45099]: ansible-command Invoked with _raw_params=podman secret ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:02 managed-node2 systemd[25528]: Started podman-45101.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:40:02 managed-node2 sudo[45096]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:40:03 managed-node2 platform-python[45230]: ansible-command Invoked with removes=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl disable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None Jul 19 12:40:03 managed-node2 systemd[1]: Stopping User Manager for UID 3001... -- Subject: Unit user@3001.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@3001.service has begun shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Stopped target Default. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Stopping D-Bus User Message Bus... -- Subject: Unit UNIT has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Removed slice podman\x2dkube.slice. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Stopping podman-pause-1458d7a0.scope. -- Subject: Unit UNIT has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Stopped D-Bus User Message Bus. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Stopped target Basic System. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Stopped target Timers. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Stopped Mark boot as successful after the user session has run 2 minutes. 
-- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Stopped target Sockets. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Stopped target Paths. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Closed D-Bus User Message Bus Socket. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Stopped podman-pause-1458d7a0.scope. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Removed slice user.slice. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[25528]: Reached target Shutdown. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:40:03 managed-node2 systemd[25528]: Started Exit the Session. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:40:03 managed-node2 systemd[25528]: Reached target Exit the Session. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jul 19 12:40:03 managed-node2 systemd[1]: user@3001.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit user@3001.service has successfully entered the 'dead' state. Jul 19 12:40:03 managed-node2 systemd[1]: Stopped User Manager for UID 3001. -- Subject: Unit user@3001.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@3001.service has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[1]: Stopping User runtime directory /run/user/3001... -- Subject: Unit user-runtime-dir@3001.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@3001.service has begun shutting down. Jul 19 12:40:03 managed-node2 systemd[1]: run-user-3001.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-user-3001.mount has successfully entered the 'dead' state. Jul 19 12:40:03 managed-node2 systemd[1]: user-runtime-dir@3001.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit user-runtime-dir@3001.service has successfully entered the 'dead' state. Jul 19 12:40:03 managed-node2 systemd[1]: Stopped User runtime directory /run/user/3001. 
-- Subject: Unit user-runtime-dir@3001.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@3001.service has finished shutting down. Jul 19 12:40:03 managed-node2 systemd[1]: Removed slice User Slice of UID 3001. -- Subject: Unit user-3001.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-3001.slice has finished shutting down. Jul 19 12:40:03 managed-node2 platform-python[45362]: ansible-command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:04 managed-node2 sudo[45486]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dlpvabpxjyeqjdweosdqhelprmnnraei ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943204.1405673-21146-194288526342265/AnsiballZ_command.py' Jul 19 12:40:04 managed-node2 sudo[45486]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:40:04 managed-node2 platform-python[45489]: ansible-command Invoked with _raw_params=podman pod exists httpd1 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:04 managed-node2 sudo[45486]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:40:04 managed-node2 platform-python[45619]: ansible-command Invoked with _raw_params=podman pod exists httpd2 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:05 managed-node2 platform-python[45749]: ansible-command Invoked with _raw_params=podman pod exists httpd3 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:05 managed-node2 sudo[45879]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uhnqljkrbofbppqpfvsxbbfqaxcbzbnz ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943205.3022838-21198-91872805319796/AnsiballZ_command.py' Jul 19 12:40:05 managed-node2 sudo[45879]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Jul 19 12:40:05 managed-node2 platform-python[45882]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:05 managed-node2 sudo[45879]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 19 12:40:05 managed-node2 platform-python[46008]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:06 managed-node2 platform-python[46134]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:06 managed-node2 platform-python[46260]: ansible-stat Invoked with path=/var/lib/systemd/linger/podman_basic_user follow=False get_md5=False get_checksum=True get_mime=True 
get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:09 managed-node2 platform-python[46508]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:10 managed-node2 platform-python[46637]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 19 12:40:11 managed-node2 platform-python[46761]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:13 managed-node2 platform-python[46886]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jul 19 12:40:14 managed-node2 platform-python[47010]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:14 managed-node2 platform-python[47135]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:15 managed-node2 platform-python[47259]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:16 managed-node2 platform-python[47383]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:16 managed-node2 platform-python[47507]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:17 managed-node2 platform-python[47630]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:17 managed-node2 platform-python[47753]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:40:19 managed-node2 platform-python[47876]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 19 12:40:19 managed-node2 platform-python[48000]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:20 managed-node2 platform-python[48125]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:21 managed-node2 platform-python[48249]: ansible-systemd Invoked with name= 
scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 19 12:40:22 managed-node2 platform-python[48376]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:22 managed-node2 platform-python[48499]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:40:23 managed-node2 platform-python[48622]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:25 managed-node2 platform-python[48747]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:25 managed-node2 platform-python[48871]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None Jul 19 12:40:26 managed-node2 platform-python[48998]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:26 managed-node2 platform-python[49121]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:40:28 managed-node2 platform-python[49244]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None Jul 19 12:40:28 managed-node2 platform-python[49368]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:29 managed-node2 platform-python[49491]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:40:29 managed-node2 platform-python[49614]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S 
unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:40:32 managed-node2 platform-python[49776]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d Jul 19 12:40:33 managed-node2 platform-python[49903]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:33 managed-node2 platform-python[50026]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:36 managed-node2 platform-python[50274]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:37 managed-node2 platform-python[50403]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 19 12:40:37 managed-node2 platform-python[50527]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:41 managed-node2 platform-python[50691]: ansible-setup Invoked with gather_subset=['all'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d Jul 19 12:40:44 managed-node2 platform-python[50843]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:45 managed-node2 platform-python[50966]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:47 managed-node2 platform-python[51214]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:48 managed-node2 platform-python[51343]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 19 12:40:48 managed-node2 platform-python[51467]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:54 managed-node2 platform-python[51631]: ansible-setup Invoked with gather_subset=['all'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d Jul 19 12:40:54 managed-node2 platform-python[51783]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:40:55 managed-node2 platform-python[51906]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:40:56 managed-node2 platform-python[52030]: ansible-dnf Invoked with name=['python3-pyasn1', 
'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:40:59 managed-node2 platform-python[52158]: ansible-dnf Invoked with name=['certmonger'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:41:02 managed-node2 dbus-daemon[591]: [system] Reloaded configuration Jul 19 12:41:02 managed-node2 dbus-daemon[591]: [system] Reloaded configuration Jul 19 12:41:02 managed-node2 dbus-daemon[591]: [system] Reloaded configuration Jul 19 12:41:02 managed-node2 dbus-daemon[591]: [system] Reloaded configuration Jul 19 12:41:02 managed-node2 dbus-daemon[591]: [system] Reloaded configuration Jul 19 12:41:02 managed-node2 systemd[1]: Reloading. Jul 19 12:41:02 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. -- Subject: Unit run-r2a2b61b169e54534b6bc9888468488f1.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit run-r2a2b61b169e54534b6bc9888468488f1.service has finished starting up. -- -- The start-up result is done. Jul 19 12:41:02 managed-node2 systemd[1]: Starting man-db-cache-update.service... -- Subject: Unit man-db-cache-update.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has begun starting up. Jul 19 12:41:02 managed-node2 systemd[1]: Reloading. Jul 19 12:41:03 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit man-db-cache-update.service has successfully entered the 'dead' state. Jul 19 12:41:03 managed-node2 systemd[1]: Started man-db-cache-update.service. -- Subject: Unit man-db-cache-update.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has finished starting up. -- -- The start-up result is done. Jul 19 12:41:03 managed-node2 systemd[1]: run-r2a2b61b169e54534b6bc9888468488f1.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-r2a2b61b169e54534b6bc9888468488f1.service has successfully entered the 'dead' state. 
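The two `ansible-dnf` invocations above install the certificate machinery: first the Python dependencies, then certmonger itself. Combined into one illustrative task (the log shows them as separate module calls with default options):

- name: Install certmonger and its Python dependencies (sketch of the logged dnf calls)
  dnf:
    name:
      - python3-pyasn1
      - python3-cryptography
      - python3-dbus
      - certmonger
    state: present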
Jul 19 12:41:04 managed-node2 platform-python[52790]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:41:04 managed-node2 platform-python[52913]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:41:05 managed-node2 platform-python[53036]: ansible-systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 19 12:41:05 managed-node2 systemd[1]: Reloading. Jul 19 12:41:05 managed-node2 systemd[1]: Starting dnf makecache... -- Subject: Unit dnf-makecache.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dnf-makecache.service has begun starting up. Jul 19 12:41:05 managed-node2 systemd[1]: Starting Certificate monitoring and PKI enrollment... -- Subject: Unit certmonger.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit certmonger.service has begun starting up. Jul 19 12:41:05 managed-node2 systemd[1]: Started Certificate monitoring and PKI enrollment. -- Subject: Unit certmonger.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit certmonger.service has finished starting up. -- -- The start-up result is done. Jul 19 12:41:05 managed-node2 dnf[53070]: Failed determining last makecache time. 
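The `ansible-file` and `ansible-systemd` entries above prepare certmonger: two root-owned 0700 script directories, then the service enabled and started. A sketch of those tasks (the doubled slash in the logged paths, `/etc/certmonger//pre-scripts`, is harmless path concatenation and is dropped here):

- name: Create certmonger pre/post script directories (sketch)
  file:
    path: "/etc/certmonger/{{ item }}"
    state: directory
    owner: root
    group: root
    mode: "0700"
  loop:
    - pre-scripts
    - post-scripts

- name: Ensure certmonger is enabled and running
  systemd:
    name: certmonger
    state: started
    enabled: true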
Jul 19 12:41:05 managed-node2 dnf[53070]: CentOS Stream 8 - AppStream 124 kB/s | 4.4 kB 00:00 Jul 19 12:41:05 managed-node2 dnf[53070]: CentOS Stream 8 - BaseOS 100 kB/s | 3.9 kB 00:00 Jul 19 12:41:06 managed-node2 dnf[53070]: CentOS Stream 8 - Extras 83 kB/s | 2.9 kB 00:00 Jul 19 12:41:06 managed-node2 platform-python[53233]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=# # Ansible managed # # system_role:certificate booted=True provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None Jul 19 12:41:06 managed-node2 dnf[53070]: CentOS Stream 8 - Extras common packages 74 kB/s | 3.0 kB 00:00 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 dnf[53070]: CentOS Stream 8 - HighAvailability 36 kB/s | 3.9 kB 00:00 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 
certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 certmonger[53258]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved. Jul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:06 managed-node2 dnf[53070]: Beaker Client - RedHatEnterpriseLinux8 8.8 kB/s | 1.5 kB 00:00 Jul 19 12:41:06 managed-node2 dnf[53070]: Beaker harness 13 kB/s | 1.3 kB 00:00 Jul 19 12:41:06 managed-node2 dnf[53070]: Copr repo for beakerlib-libraries owned by bgon 13 kB/s | 1.8 kB 00:00 Jul 19 12:41:06 managed-node2 dnf[53070]: Extra Packages for Enterprise Linux 8 - x86_64 413 kB/s | 35 kB 00:00 Jul 19 12:41:06 managed-node2 platform-python[53383]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Jul 19 12:41:07 managed-node2 dnf[53070]: Extra Packages for Enterprise Linux 8 - x86_64 281 kB/s | 21 kB 00:00 Jul 19 12:41:07 managed-node2 platform-python[53508]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key Jul 19 12:41:07 managed-node2 dnf[53070]: Extra Packages for Enterprise Linux 8 - x86_64 81 kB/s | 25 kB 00:00 Jul 19 12:41:07 managed-node2 dnf[53070]: Copr repo for qa-tools owned by lpol 33 kB/s | 1.8 kB 00:00 Jul 19 12:41:07 managed-node2 platform-python[53632]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Jul 19 12:41:08 managed-node2 platform-python[53755]: ansible-command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:41:08 managed-node2 certmonger[53073]: 2025-07-19 12:41:08 [53073] Wrote to /var/lib/certmonger/requests/20250719164106 Jul 19 12:41:08 managed-node2 platform-python[53879]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None 
remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:41:09 managed-node2 platform-python[54002]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:41:09 managed-node2 platform-python[54125]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Jul 19 12:41:09 managed-node2 dnf[53070]: Metadata cache created. Jul 19 12:41:10 managed-node2 systemd[1]: dnf-makecache.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit dnf-makecache.service has successfully entered the 'dead' state. Jul 19 12:41:10 managed-node2 systemd[1]: Started dnf makecache. -- Subject: Unit dnf-makecache.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dnf-makecache.service has finished starting up. -- -- The start-up result is done. 
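The `certificate_request` call and the `getcert`/`file` cleanup around it correspond to driving the certificate role and then untracking and deleting the test certificate. A hedged sketch using only the parameters visible in the log (`name=quadlet_demo`, `dns=['localhost']`, `ca=self-sign`):

- name: Issue a self-signed certificate for the demo (sketch)
  include_role:
    name: fedora.linux_system_roles.certificate
  vars:
    certificate_requests:
      - name: quadlet_demo
        dns:
          - localhost
        ca: self-sign

- name: Stop tracking the certificate after the test (command taken from the log)
  command: getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt

- name: Remove the issued certificate and key
  file:
    path: "{{ item }}"
    state: absent
  loop:
    - /etc/pki/tls/certs/quadlet_demo.crt
    - /etc/pki/tls/private/quadlet_demo.key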
Jul 19 12:41:10 managed-node2 platform-python[54249]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:41:10 managed-node2 platform-python[54372]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:41:13 managed-node2 platform-python[54620]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:41:14 managed-node2 platform-python[54749]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 19 12:41:14 managed-node2 platform-python[54873]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:41:16 managed-node2 platform-python[54998]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:41:17 managed-node2 platform-python[55121]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:41:17 managed-node2 platform-python[55244]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:41:18 managed-node2 platform-python[55368]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:41:21 managed-node2 platform-python[55491]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 19 12:41:21 managed-node2 platform-python[55618]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 19 12:41:22 managed-node2 platform-python[55745]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 19 12:41:23 managed-node2 platform-python[55868]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] 
interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 19 12:41:25 managed-node2 platform-python[55991]: ansible-command Invoked with _raw_params=journalctl -ex warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:41:25 managed-node2 platform-python[56115]: ansible-command Invoked with _raw_params=podman ps -a warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:41:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck670583346-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-metacopy\x2dcheck670583346-merged.mount has successfully entered the 'dead' state. Jul 19 12:41:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 19 12:41:26 managed-node2 platform-python[56245]: ansible-command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:41:26 managed-node2 platform-python[56375]: ansible-command Invoked with _raw_params=set -euo pipefail; systemctl list-units --all | grep quadlet _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:41:26 managed-node2 platform-python[56501]: ansible-command Invoked with _raw_params=ls -alrtF /etc/systemd/system warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:41:29 managed-node2 platform-python[56750]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:41:30 managed-node2 platform-python[56879]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 19 12:41:32 managed-node2 platform-python[57004]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jul 19 12:41:35 managed-node2 platform-python[57127]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None Jul 19 12:41:35 managed-node2 
platform-python[57254]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None Jul 19 12:41:36 managed-node2 platform-python[57381]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 19 12:41:36 managed-node2 platform-python[57504]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 19 12:41:38 managed-node2 platform-python[57627]: ansible-command Invoked with _raw_params=exec 1>&2 set -x set -o pipefail systemctl list-units --plain -l --all | grep quadlet || : systemctl list-unit-files --all | grep quadlet || : systemctl list-units --plain --failed -l --all | grep quadlet || : _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 19 12:41:39 managed-node2 platform-python[57757]: ansible-command Invoked with _raw_params=journalctl -ex warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
PLAY RECAP *********************************************************************
managed-node2 : ok=90 changed=8 unreachable=0 failed=2 skipped=140 rescued=2 ignored=0
SYSTEM ROLES ERRORS BEGIN v1
[ { "ansible_version": "2.9.27", "end_time": "2025-07-19T16:41:24.872030+00:00Z", "host": "managed-node2", "message": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "start_time": "2025-07-19T16:41:24.847853+00:00Z", "task_name": "Manage each secret", "task_path": "/tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41" }, { "ansible_version": "2.9.27", "delta": "0:00:00.027461", "end_time": "2025-07-19 12:41:25.259357", "host": "managed-node2", "message": "No message could be found", "rc": 0, "start_time": "2025-07-19 12:41:25.231896", "stdout": "-- Logs begin at Sat 2025-07-19 12:30:11 EDT, end at Sat 2025-07-19 12:41:25 EDT. 
--\nJul 19 12:35:22 managed-node2 platform-python[11895]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:35:23 managed-node2 platform-python[12024]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 19 12:35:23 managed-node2 platform-python[12148]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:35:24 managed-node2 platform-python[12273]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:35:25 managed-node2 platform-python[12396]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:35:25 managed-node2 platform-python[12519]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:35:26 managed-node2 platform-python[12643]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:35:29 managed-node2 platform-python[12766]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 19 12:35:30 managed-node2 platform-python[12893]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 19 12:35:30 managed-node2 systemd[1]: Reloading.\nJul 19 12:35:30 managed-node2 systemd[1]: Starting firewalld - dynamic firewall daemon...\n-- Subject: Unit firewalld.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit firewalld.service has begun starting up.\nJul 19 12:35:31 managed-node2 systemd[1]: Started firewalld - dynamic firewall daemon.\n-- Subject: Unit firewalld.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit firewalld.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:35:31 managed-node2 firewalld[12929]: WARNING: AllowZoneDrifting is enabled. This is considered an insecure configuration option. It will be removed in a future release. 
Please consider disabling it now.\nJul 19 12:35:32 managed-node2 platform-python[13115]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 19 12:35:33 managed-node2 platform-python[13238]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:35:33 managed-node2 platform-python[13361]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:35:34 managed-node2 platform-python[13484]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:35:37 managed-node2 platform-python[13607]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:35:39 managed-node2 platform-python[13730]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:35:42 managed-node2 dbus-daemon[591]: [system] Reloaded configuration\nJul 19 12:35:42 managed-node2 dbus-daemon[591]: [system] Reloaded configuration\nJul 19 12:35:42 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.\n-- Subject: Unit run-r58006eb3d48a46a9a552c0899f8af7ac.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit run-r58006eb3d48a46a9a552c0899f8af7ac.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:35:42 managed-node2 systemd[1]: Starting man-db-cache-update.service...\n-- Subject: Unit man-db-cache-update.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit man-db-cache-update.service has begun starting up.\nJul 19 12:35:42 managed-node2 systemd[1]: 
man-db-cache-update.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit man-db-cache-update.service has successfully entered the 'dead' state.\nJul 19 12:35:42 managed-node2 systemd[1]: Started man-db-cache-update.service.\n-- Subject: Unit man-db-cache-update.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit man-db-cache-update.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:35:42 managed-node2 systemd[1]: run-r58006eb3d48a46a9a552c0899f8af7ac.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-r58006eb3d48a46a9a552c0899f8af7ac.service has successfully entered the 'dead' state.\nJul 19 12:35:43 managed-node2 platform-python[14335]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 19 12:35:43 managed-node2 platform-python[14483]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:35:45 managed-node2 platform-python[14607]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 19 12:35:46 managed-node2 kernel: SELinux: Converting 460 SID table entries...\nJul 19 12:35:46 managed-node2 kernel: SELinux: policy capability network_peer_controls=1\nJul 19 12:35:46 managed-node2 kernel: SELinux: policy capability open_perms=1\nJul 19 12:35:46 managed-node2 kernel: SELinux: policy capability extended_socket_class=1\nJul 19 12:35:46 managed-node2 kernel: SELinux: policy capability always_check_network=0\nJul 19 12:35:46 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1\nJul 19 12:35:46 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1\nJul 19 12:35:46 managed-node2 dbus-daemon[591]: [system] Reloaded configuration\nJul 19 12:35:47 managed-node2 platform-python[14734]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 19 12:35:51 managed-node2 platform-python[14857]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:35:53 managed-node2 platform-python[14982]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:35:54 managed-node2 platform-python[15105]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:35:54 managed-node2 platform-python[15228]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False 
get_mime=True get_attributes=True\nJul 19 12:35:54 managed-node2 platform-python[15327]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752942954.3752043-9946-72044176595742/source _original_basename=tmpi7ylefvg follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None\nJul 19 12:35:55 managed-node2 platform-python[15452]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:35:55 managed-node2 kernel: evm: overlay not supported\nJul 19 12:35:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\\x2dcheck103626253-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-metacopy\\x2dcheck103626253-merged.mount has successfully entered the 'dead' state.\nJul 19 12:35:55 managed-node2 systemd[1]: Created slice machine.slice.\n-- Subject: Unit machine.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:35:55 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice.\n-- Subject: Unit machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:35:56 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:36:00 managed-node2 platform-python[15778]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:36:01 managed-node2 platform-python[15907]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:36:04 managed-node2 platform-python[16032]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True 
security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:36:07 managed-node2 platform-python[16155]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 19 12:36:08 managed-node2 platform-python[16282]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 19 12:36:09 managed-node2 platform-python[16409]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 19 12:36:10 managed-node2 platform-python[16532]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:36:13 managed-node2 platform-python[16655]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:36:16 managed-node2 platform-python[16778]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:36:19 managed-node2 platform-python[16901]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 19 12:36:21 managed-node2 platform-python[17049]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 19 12:36:22 managed-node2 platform-python[17172]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 19 12:36:26 managed-node2 platform-python[17295]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True 
get_attributes=True checksum_algorithm=sha1\nJul 19 12:36:28 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:36:28 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:36:29 managed-node2 platform-python[17558]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:36:29 managed-node2 platform-python[17681]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:36:29 managed-node2 platform-python[17804]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 19 12:36:30 managed-node2 platform-python[17903]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752942989.6065838-11409-115866441513393/source _original_basename=tmpinaqg9cl follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None\nJul 19 12:36:30 managed-node2 platform-python[18028]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:36:30 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice.\n-- Subject: Unit machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:36:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The 
unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:36:34 managed-node2 platform-python[18315]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:36:35 managed-node2 platform-python[18444]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:36:37 managed-node2 platform-python[18569]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:36:40 managed-node2 platform-python[18692]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 19 12:36:41 managed-node2 platform-python[18819]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 19 12:36:41 managed-node2 platform-python[18946]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 19 12:36:43 managed-node2 platform-python[19069]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:36:46 managed-node2 platform-python[19192]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:36:49 managed-node2 platform-python[19315]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True 
install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:36:52 managed-node2 platform-python[19438]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 19 12:36:54 managed-node2 platform-python[19586]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 19 12:36:54 managed-node2 platform-python[19709]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 19 12:36:59 managed-node2 platform-python[19832]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:37:00 managed-node2 platform-python[19957]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:37:01 managed-node2 platform-python[20081]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 19 12:37:01 managed-node2 platform-python[20208]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:37:02 managed-node2 platform-python[20333]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:37:02 managed-node2 platform-python[20333]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/nopull.yml\nJul 19 12:37:02 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice.\n-- Subject: Unit machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice has finished shutting down.\nJul 19 12:37:02 managed-node2 systemd[1]: machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice: Consumed 0 CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice completed and consumed the indicated resources.\nJul 19 12:37:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit 
var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:37:02 managed-node2 platform-python[20471]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:37:03 managed-node2 platform-python[20594]: ansible-command Invoked with _raw_params=podman image prune -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:37:06 managed-node2 platform-python[20849]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:37:07 managed-node2 platform-python[20978]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:37:10 managed-node2 platform-python[21103]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:37:13 managed-node2 platform-python[21226]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 19 12:37:14 managed-node2 platform-python[21353]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 19 12:37:15 managed-node2 platform-python[21480]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 19 12:37:16 managed-node2 platform-python[21603]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:37:19 managed-node2 
platform-python[21726]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:37:22 managed-node2 platform-python[21849]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:37:25 managed-node2 platform-python[21972]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 19 12:37:27 managed-node2 platform-python[22120]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 19 12:37:28 managed-node2 platform-python[22243]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 19 12:37:32 managed-node2 platform-python[22366]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:37:34 managed-node2 platform-python[22491]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:37:34 managed-node2 platform-python[22615]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 19 12:37:35 managed-node2 platform-python[22742]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:37:35 managed-node2 platform-python[22867]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:37:35 managed-node2 platform-python[22867]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/bogus.yml\nJul 19 12:37:35 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice.\n-- Subject: Unit machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice has finished shutting down\n-- Defined-By: systemd\n-- 
Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice has finished shutting down.\nJul 19 12:37:35 managed-node2 systemd[1]: machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice: Consumed 0 CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice completed and consumed the indicated resources.\nJul 19 12:37:35 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:37:36 managed-node2 platform-python[23006]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:37:36 managed-node2 platform-python[23129]: ansible-command Invoked with _raw_params=podman image prune -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:37:40 managed-node2 platform-python[23384]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:37:41 managed-node2 platform-python[23513]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:37:44 managed-node2 platform-python[23638]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:37:47 managed-node2 platform-python[23761]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 19 12:37:47 managed-node2 platform-python[23888]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 19 12:37:48 managed-node2 platform-python[24015]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] 
helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 19 12:37:50 managed-node2 platform-python[24138]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:37:52 managed-node2 platform-python[24261]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:37:55 managed-node2 platform-python[24384]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:37:58 managed-node2 platform-python[24507]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 19 12:38:00 managed-node2 platform-python[24655]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 19 12:38:01 managed-node2 platform-python[24778]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 19 12:38:05 managed-node2 platform-python[24901]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 19 12:38:06 managed-node2 platform-python[25025]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:38:06 managed-node2 platform-python[25150]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:06 managed-node2 platform-python[25274]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:08 managed-node2 platform-python[25398]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:08 
managed-node2 platform-python[25522]: ansible-command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None\nJul 19 12:38:08 managed-node2 systemd[1]: Created slice User Slice of UID 3001.\n-- Subject: Unit user-3001.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-3001.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:08 managed-node2 systemd[1]: Starting User runtime directory /run/user/3001...\n-- Subject: Unit user-runtime-dir@3001.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-runtime-dir@3001.service has begun starting up.\nJul 19 12:38:08 managed-node2 systemd[1]: Started User runtime directory /run/user/3001.\n-- Subject: Unit user-runtime-dir@3001.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-runtime-dir@3001.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:08 managed-node2 systemd[1]: Starting User Manager for UID 3001...\n-- Subject: Unit user@3001.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user@3001.service has begun starting up.\nJul 19 12:38:08 managed-node2 systemd[25528]: pam_unix(systemd-user:session): session opened for user podman_basic_user by (uid=0)\nJul 19 12:38:08 managed-node2 systemd[25528]: Reached target Paths.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:08 managed-node2 systemd[25528]: Starting D-Bus User Message Bus Socket.\n-- Subject: Unit UNIT has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has begun starting up.\nJul 19 12:38:08 managed-node2 systemd[25528]: Started Mark boot as successful after the user session has run 2 minutes.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:08 managed-node2 systemd[25528]: Reached target Timers.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:08 managed-node2 systemd[25528]: Listening on D-Bus User Message Bus Socket.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:08 managed-node2 systemd[25528]: Reached target Sockets.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:08 managed-node2 systemd[25528]: Reached target Basic System.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up 
result is done.\nJul 19 12:38:08 managed-node2 systemd[25528]: Reached target Default.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:08 managed-node2 systemd[25528]: Startup finished in 28ms.\n-- Subject: User manager start-up is now complete\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The user manager instance for user 3001 has been started. All services queued\n-- for starting have been started. Note that other services might still be starting\n-- up or be started at any later time.\n-- \n-- Startup of the manager took 28712 microseconds.\nJul 19 12:38:08 managed-node2 systemd[1]: Started User Manager for UID 3001.\n-- Subject: Unit user@3001.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user@3001.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:09 managed-node2 platform-python[25663]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:10 managed-node2 platform-python[25786]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:10 managed-node2 sudo[25909]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ziadsxoqzpztrsztgsdmjtfdqrqqbghq ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943090.202407-15734-110385486361950/AnsiballZ_podman_image.py'\nJul 19 12:38:10 managed-node2 sudo[25909]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:38:10 managed-node2 systemd[25528]: Started D-Bus User Message Bus.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:10 managed-node2 systemd[25528]: Created slice user.slice.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:10 managed-node2 systemd[25528]: Started podman-25921.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:10 managed-node2 systemd[25528]: Started 
podman-pause-1458d7a0.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:10 managed-node2 systemd[25528]: Started podman-25939.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:11 managed-node2 systemd[25528]: Started podman-25955.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:11 managed-node2 sudo[25909]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:38:11 managed-node2 platform-python[26084]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:38:12 managed-node2 platform-python[26207]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:12 managed-node2 platform-python[26330]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 19 12:38:13 managed-node2 platform-python[26429]: ansible-copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752943092.4331284-15846-26644570363473/source _original_basename=tmp0dg28w0o follow=False checksum=fe0b16bd085957dfbf8e2496934305469d165478 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None\nJul 19 12:38:13 managed-node2 sudo[26554]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-louqgipewhnyaovmbewiqaddtljctvmr ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943093.148976-15887-61767546226259/AnsiballZ_podman_play.py'\nJul 19 12:38:13 managed-node2 sudo[26554]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:38:13 managed-node2 platform-python[26557]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None 
tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:38:13 managed-node2 systemd[25528]: Started podman-26565.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:13 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6\nJul 19 12:38:13 managed-node2 systemd[25528]: Started rootless-netns-6ed4b4b3.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:13 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this.\nJul 19 12:38:13 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vetha780888b: link is not ready\nJul 19 12:38:13 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered blocking state\nJul 19 12:38:13 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered disabled state\nJul 19 12:38:13 managed-node2 kernel: device vetha780888b entered promiscuous mode\nJul 19 12:38:13 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready\nJul 19 12:38:13 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready\nJul 19 12:38:13 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vetha780888b: link becomes ready\nJul 19 12:38:13 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered blocking state\nJul 19 12:38:13 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered forwarding state\nJul 19 12:38:14 managed-node2 dnsmasq[26752]: listening on cni-podman1(#3): 10.89.0.1\nJul 19 12:38:14 managed-node2 dnsmasq[26754]: started, version 2.79 cachesize 150\nJul 19 12:38:14 managed-node2 dnsmasq[26754]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify\nJul 19 12:38:14 managed-node2 dnsmasq[26754]: using local addresses only for domain dns.podman\nJul 19 12:38:14 managed-node2 dnsmasq[26754]: reading /etc/resolv.conf\nJul 19 12:38:14 managed-node2 dnsmasq[26754]: using local addresses only for domain dns.podman\nJul 19 12:38:14 managed-node2 dnsmasq[26754]: using nameserver 10.0.2.3#53\nJul 19 12:38:14 managed-node2 dnsmasq[26754]: using nameserver 10.29.169.13#53\nJul 19 12:38:14 managed-node2 dnsmasq[26754]: using nameserver 10.29.170.12#53\nJul 19 12:38:14 managed-node2 dnsmasq[26754]: using nameserver 10.2.32.1#53\nJul 19 12:38:14 managed-node2 dnsmasq[26754]: read /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 19 12:38:14 managed-node2 conmon[26767]: conmon f153d4517c8778d9470c : failed to write to /proc/self/oom_score_adj: Permission denied\nJul 19 12:38:14 managed-node2 conmon[26768]: conmon f153d4517c8778d9470c : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach}\nJul 19 12:38:14 managed-node2 conmon[26768]: conmon f153d4517c8778d9470c : terminal_ctrl_fd: 14\nJul 19 12:38:14 managed-node2 conmon[26768]: conmon f153d4517c8778d9470c : winsz read side: 17, winsz write side: 18\nJul 19 12:38:14 managed-node2 conmon[26768]: conmon f153d4517c8778d9470c : container PID: 26778\nJul 19 12:38:14 managed-node2 conmon[26788]: conmon a8773b3857e3e0dd4e13 : failed to write to 
/proc/self/oom_score_adj: Permission denied\nJul 19 12:38:14 managed-node2 conmon[26789]: conmon a8773b3857e3e0dd4e13 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach}\nJul 19 12:38:14 managed-node2 conmon[26789]: conmon a8773b3857e3e0dd4e13 : terminal_ctrl_fd: 13\nJul 19 12:38:14 managed-node2 conmon[26789]: conmon a8773b3857e3e0dd4e13 : winsz read side: 16, winsz write side: 17\nJul 19 12:38:14 managed-node2 conmon[26789]: conmon a8773b3857e3e0dd4e13 : container PID: 26799\nJul 19 12:38:14 managed-node2 platform-python[26557]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 19 12:38:14 managed-node2 platform-python[26557]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod:\n 0c3499cd78df9c3cbd4aa0aa8e91445278eb6157f70949cc299bc9c609a42f3c\n Container:\n a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\n \nJul 19 12:38:14 managed-node2 platform-python[26557]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-19T12:38:13-04:00\" level=info msg=\"/bin/podman filtering at log level debug\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Initializing event backend file\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n 
time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Successfully loaded 1 networks\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"found free device name cni-podman1\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"found free ipv4 network subnet 10.89.0.0/24\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-19 12:38:13.521272 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"reference \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\" does not resolve to an image ID\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"reference \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\" does not resolve to an image ID\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"FROM \\\"scratch\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Cached value indicated that idmapped mounts for overlay are not supported\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Check for idmapped mounts support \"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n 
time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"overlay: test mount indicated that volatile is being used\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345/work,userxattr,volatile,context=\\\"system_u:object_r:container_file_t:s0:c480,c514\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Container ID: ac8e6c0ad9d62a1134f2644b1390fd8fa36d22d0d6282cefc7edd95b4f95d64d\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\\\"\\\", Src:[]string{\\\"/usr/libexec/podman/catatonit\\\"}, Dest:\\\"/catatonit\\\", Download:false, Chown:\\\"\\\", Chmod:\\\"\\\", Checksum:\\\"\\\", Files:[]imagebuilder.File(nil)}\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"added content file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]}\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"COMMIT localhost/podman-pause:4.9.4-dev-1708535009\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"COMMIT \\\"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"committing image with reference \\\"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\" is allowed by policy\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"layer list: [\\\"340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345\\\"]\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"using \\\"/var/tmp/buildah2427832820\\\" to hold temporary data\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Tar with options on /home/podman_basic_user/.local/share/containers/storage/overlay/340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345/diff\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"layer \\\"340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345\\\" size is 767488 bytes, uncompressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690, possibly-compressed digest 
sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"OCIv1 config = {\\\"created\\\":\\\"2025-07-19T16:38:13.656892898Z\\\",\\\"architecture\\\":\\\"amd64\\\",\\\"os\\\":\\\"linux\\\",\\\"config\\\":{\\\"Env\\\":[\\\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\\\"],\\\"Entrypoint\\\":[\\\"/catatonit\\\",\\\"-P\\\"],\\\"Labels\\\":{\\\"io.buildah.version\\\":\\\"1.33.5\\\"}},\\\"rootfs\\\":{\\\"type\\\":\\\"layers\\\",\\\"diff_ids\\\":[\\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\"]},\\\"history\\\":[{\\\"created\\\":\\\"2025-07-19T16:38:13.656345599Z\\\",\\\"created_by\\\":\\\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \\\",\\\"empty_layer\\\":true},{\\\"created\\\":\\\"2025-07-19T16:38:13.660597339Z\\\",\\\"created_by\\\":\\\"/bin/sh -c #(nop) ENTRYPOINT [\\\\\\\"/catatonit\\\\\\\", \\\\\\\"-P\\\\\\\"]\\\"}]}\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"OCIv1 manifest = {\\\"schemaVersion\\\":2,\\\"mediaType\\\":\\\"application/vnd.oci.image.manifest.v1+json\\\",\\\"config\\\":{\\\"mediaType\\\":\\\"application/vnd.oci.image.config.v1+json\\\",\\\"digest\\\":\\\"sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\",\\\"size\\\":668},\\\"layers\\\":[{\\\"mediaType\\\":\\\"application/vnd.oci.image.layer.v1.tar\\\",\\\"digest\\\":\\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\",\\\"size\\\":767488}],\\\"annotations\\\":{\\\"org.opencontainers.image.base.digest\\\":\\\"\\\",\\\"org.opencontainers.image.base.name\\\":\\\"\\\"}}\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Docker v2s2 config = {\\\"created\\\":\\\"2025-07-19T16:38:13.656892898Z\\\",\\\"container\\\":\\\"ac8e6c0ad9d62a1134f2644b1390fd8fa36d22d0d6282cefc7edd95b4f95d64d\\\",\\\"container_config\\\":{\\\"Hostname\\\":\\\"\\\",\\\"Domainname\\\":\\\"\\\",\\\"User\\\":\\\"\\\",\\\"AttachStdin\\\":false,\\\"AttachStdout\\\":false,\\\"AttachStderr\\\":false,\\\"Tty\\\":false,\\\"OpenStdin\\\":false,\\\"StdinOnce\\\":false,\\\"Env\\\":[\\\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\\\"],\\\"Cmd\\\":[],\\\"Image\\\":\\\"\\\",\\\"Volumes\\\":{},\\\"WorkingDir\\\":\\\"\\\",\\\"Entrypoint\\\":[\\\"/catatonit\\\",\\\"-P\\\"],\\\"OnBuild\\\":[],\\\"Labels\\\":{\\\"io.buildah.version\\\":\\\"1.33.5\\\"}},\\\"config\\\":{\\\"Hostname\\\":\\\"\\\",\\\"Domainname\\\":\\\"\\\",\\\"User\\\":\\\"\\\",\\\"AttachStdin\\\":false,\\\"AttachStdout\\\":false,\\\"AttachStderr\\\":false,\\\"Tty\\\":false,\\\"OpenStdin\\\":false,\\\"StdinOnce\\\":false,\\\"Env\\\":[\\\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\\\"],\\\"Cmd\\\":[],\\\"Image\\\":\\\"\\\",\\\"Volumes\\\":{},\\\"WorkingDir\\\":\\\"\\\",\\\"Entrypoint\\\":[\\\"/catatonit\\\",\\\"-P\\\"],\\\"OnBuild\\\":[],\\\"Labels\\\":{\\\"io.buildah.version\\\":\\\"1.33.5\\\"}},\\\"architecture\\\":\\\"amd64\\\",\\\"os\\\":\\\"linux\\\",\\\"rootfs\\\":{\\\"type\\\":\\\"layers\\\",\\\"diff_ids\\\":[\\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\"]},\\\"history\\\":[{\\\"created\\\":\\\"2025-07-19T16:38:13.656345599Z\\\",\\\"created_by\\\":\\\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \\\",\\\"empty_layer\\\":true},{\\\"created\\\":\\\"2025-07-19T16:38:13.660597339Z\\\",\\\"created_by\\\":\\\"/bin/sh -c 
#(nop) ENTRYPOINT [\\\\\\\"/catatonit\\\\\\\", \\\\\\\"-P\\\\\\\"]\\\"}]}\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Docker v2s2 manifest = {\\\"schemaVersion\\\":2,\\\"mediaType\\\":\\\"application/vnd.docker.distribution.manifest.v2+json\\\",\\\"config\\\":{\\\"mediaType\\\":\\\"application/vnd.docker.container.image.v1+json\\\",\\\"size\\\":1342,\\\"digest\\\":\\\"sha256:803cd64c1bc1a2e7297b3d5f520a915c581e4037aabac925fb21fc3ad8b279ee\\\"},\\\"layers\\\":[{\\\"mediaType\\\":\\\"application/vnd.docker.image.rootfs.diff.tar\\\",\\\"size\\\":767488,\\\"digest\\\":\\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\"}]}\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"IsRunningImageAllowed for image containers-storage:\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\" Using transport \\\"containers-storage\\\" policy section \"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\" Requirement 0: allowed\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Overall: allowed\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"start reading config\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"finished reading config\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"... 
will first try using the original manifest unmodified\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Checking if we can reuse blob sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690: general substitution = true, compression for MIME type \\\"application/vnd.oci.image.layer.v1.tar\\\" = true\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"reading layer \\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"No compression detected\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using original blob without modification\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690/diff\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"finished reading layer \\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"No compression detected\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Compression change for blob sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63 (\\\"application/vnd.oci.image.config.v1+json\\\") not supported\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using original blob without modification\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"setting image creation date to 2025-07-19 16:38:13.656892898 +0000 UTC\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"created new image ID \\\"9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\" with metadata \\\"{}\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"added name \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" to image \\\"9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"printing final image id \\\"9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Got pod cgroup as /libpod_parent/0c3499cd78df9c3cbd4aa0aa8e91445278eb6157f70949cc299bc9c609a42f3c\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" 
in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63)\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Inspecting image 9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Inspecting image 9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Inspecting image 9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"setting container name 0c3499cd78df-infra\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Allocated lock 1 for container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Created container \\\"f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Container \\\"f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\\\" has work directory \\\"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Container \\\"f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\\\" has run directory \\\"/run/user/3001/containers/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage 
([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Pulling image quay.io/libpod/testimage:20210610 (policy: missing)\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n 
time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"adding container to pod httpd1\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"setting container name httpd1-httpd1\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=info msg=\"Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Adding mount /proc\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Adding mount /dev\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Adding mount /dev/pts\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Adding mount /dev/mqueue\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Adding mount /sys\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Adding mount /sys/fs/cgroup\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Allocated lock 2 for container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as 
blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Created container \\\"a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Container \\\"a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\\\" has work directory \\\"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Container \\\"a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\\\" has run directory \\\"/run/user/3001/containers/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Strongconnecting node f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Pushed f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c onto stack\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Finishing node f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c. Popped f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c off stack\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Strongconnecting node a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Pushed a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 onto stack\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Finishing node a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458. Popped a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 off stack\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/T3ZNBLNG2W7D2UELJU7O7YZ76X,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/127ba02fe95229d135be77cff068ce7d066eb98b87e356cce8c34501e113568e/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/127ba02fe95229d135be77cff068ce7d066eb98b87e356cce8c34501e113568e/work,userxattr,context=\\\"system_u:object_r:container_file_t:s0:c330,c361\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Mounted container \\\"f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\\\" at \\\"/home/podman_basic_user/.local/share/containers/storage/overlay/127ba02fe95229d135be77cff068ce7d066eb98b87e356cce8c34501e113568e/merged\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Created root filesystem for container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c at /home/podman_basic_user/.local/share/containers/storage/overlay/127ba02fe95229d135be77cff068ce7d066eb98b87e356cce8c34501e113568e/merged\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Made network namespace at /run/user/3001/netns/netns-06b3cc7d-4137-7077-edbe-bd7530bc2101 for container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"creating rootless network namespace with name \\\"rootless-netns-d22c9f230d0691b8f418\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"slirp4netns command: /bin/slirp4netns --disable-host-loopback --mtu=65520 --enable-sandbox --enable-seccomp --enable-ipv6 -c -r 3 --netns-type=path 
/run/user/3001/netns/rootless-netns-d22c9f230d0691b8f418 tap0\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"The path of /etc/resolv.conf in the mount ns is \\\"/etc/resolv.conf\\\"\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"cni result for container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:76:19:76:c8:78:b3 Sandbox:} {Name:vetha780888b Mac:f2:ee:6b:fd:41:a0 Sandbox:} {Name:eth0 Mac:2e:67:99:01:50:2a Sandbox:/run/user/3001/netns/netns-06b3cc7d-4137-7077-edbe-bd7530bc2101}] [{Version:4 Interface:0xc000c00b08 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-19T12:38:14-04:00\\\" level=info msg=\\\"Starting parent driver\\\"\\n\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-19T12:38:14-04:00\\\" level=info msg=\\\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport2421233428/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport2421233428/.bp.sock]\\\"\\n\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-19T12:38:14-04:00\\\" level=info msg=\\\"Starting child driver in child netns (\\\\\\\"/proc/self/exe\\\\\\\" [rootlessport-child])\\\"\\n\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-19T12:38:14-04:00\\\" level=info msg=\\\"Waiting for initComplete\\\"\\n\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-19T12:38:14-04:00\\\" level=info msg=\\\"initComplete is closed; parent and child established the communication channel\\\"\\n\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-19T12:38:14-04:00\\\" level=info msg=\\\"Exposing ports [{ 80 15001 1 tcp}]\\\"\\n\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"rootlessport is ready\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-19T12:38:14-04:00\\\" level=info msg=Ready\\n\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"/etc/system-fips does not exist on host, not mounting FIPS mode subscription\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Workdir \\\"/\\\" resolved to host path \\\"/home/podman_basic_user/.local/share/containers/storage/overlay/127ba02fe95229d135be77cff068ce7d066eb98b87e356cce8c34501e113568e/merged\\\"\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Created OCI spec for container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata/config.json\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Got pod cgroup as \"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c -u f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c -r /usr/bin/runc -b 
/home/podman_basic_user/.local/share/containers/storage/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata -p /run/user/3001/containers/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata/pidfile -n 0c3499cd78df-infra --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c]\"\n time=\"2025-07-19T12:38:14-04:00\" level=info msg=\"Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for blkio: mkdir /sys/fs/cgroup/blkio/libpod_parent: permission denied\"\n [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied\n \n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Received: 26778\"\n time=\"2025-07-19T12:38:14-04:00\" level=info msg=\"Got Conmon PID as 26768\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Created container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c in OCI runtime\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Adding nameserver(s) from network status of '[\\\"10.89.0.1\\\"]'\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Adding search domain(s) from network status of '[\\\"dns.podman\\\"]'\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Starting container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c with command [/catatonit -P]\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Started container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/Q6SNT2SFVF32LFZYXFZFNM34JV,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/30ccda56ccc67908370dcb7e56c6e44595e70d6af5e179ee106b4a3d90b31e0f/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/30ccda56ccc67908370dcb7e56c6e44595e70d6af5e179ee106b4a3d90b31e0f/work,userxattr,context=\\\"system_u:object_r:container_file_t:s0:c330,c361\\\"\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Mounted container 
\\\"a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\\\" at \\\"/home/podman_basic_user/.local/share/containers/storage/overlay/30ccda56ccc67908370dcb7e56c6e44595e70d6af5e179ee106b4a3d90b31e0f/merged\\\"\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Created root filesystem for container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 at /home/podman_basic_user/.local/share/containers/storage/overlay/30ccda56ccc67908370dcb7e56c6e44595e70d6af5e179ee106b4a3d90b31e0f/merged\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"/etc/system-fips does not exist on host, not mounting FIPS mode subscription\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Workdir \\\"/var/www\\\" resolved to a volume or mount\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Created OCI spec for container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata/config.json\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Got pod cgroup as \"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 -u a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata -p /run/user/3001/containers/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458]\"\n time=\"2025-07-19T12:38:14-04:00\" level=info msg=\"Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for cpu: 
mkdir /sys/fs/cgroup/cpu/conmon: permission denied\"\n [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied\n \n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Received: 26799\"\n time=\"2025-07-19T12:38:14-04:00\" level=info msg=\"Got Conmon PID as 26789\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Created container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 in OCI runtime\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Starting container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 with command [/bin/busybox-extras httpd -f -p 80]\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Started container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Shutting down engines\"\nJul 19 12:38:14 managed-node2 platform-python[26557]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 19 12:38:14 managed-node2 sudo[26554]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:38:14 managed-node2 sudo[26930]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lmrmjlkkbsynluhmnwsuczlkvrvxmsws ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943094.4993763-15928-49906019321868/AnsiballZ_systemd.py'\nJul 19 12:38:14 managed-node2 sudo[26930]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:38:14 managed-node2 platform-python[26933]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None\nJul 19 12:38:14 managed-node2 systemd[25528]: Reloading.\nJul 19 12:38:14 managed-node2 sudo[26930]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:38:15 managed-node2 dnsmasq[26754]: listening on cni-podman1(#3): fe80::7419:76ff:fec8:78b3%cni-podman1\nJul 19 12:38:15 managed-node2 sudo[27068]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gajwmakwkbshdhvfjxukfbkcepplsltd ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943095.0683737-15953-10632554991967/AnsiballZ_systemd.py'\nJul 19 12:38:15 managed-node2 sudo[27068]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:38:15 managed-node2 platform-python[27071]: ansible-systemd Invoked with name= scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None\nJul 19 12:38:15 managed-node2 systemd[25528]: Reloading.\nJul 19 12:38:15 managed-node2 sudo[27068]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:38:15 managed-node2 sudo[27207]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dcmefbhxtsrhgtubtgzkfqommjfyuibn ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943095.7285938-15986-176076416334674/AnsiballZ_systemd.py'\nJul 19 12:38:15 managed-node2 sudo[27207]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:38:16 managed-node2 
platform-python[27210]: ansible-systemd Invoked with name= scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None\nJul 19 12:38:16 managed-node2 systemd[25528]: Created slice podman\\x2dkube.slice.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:16 managed-node2 systemd[25528]: Starting A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit UNIT has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has begun starting up.\nJul 19 12:38:16 managed-node2 conmon[26789]: conmon a8773b3857e3e0dd4e13 : container 26799 exited with status 137\nJul 19 12:38:16 managed-node2 conmon[26768]: conmon f153d4517c8778d9470c : container 26778 exited with status 137\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458)\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using transient store: false\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 19 12:38:16 
managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Initializing event backend file\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c)\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 19 12:38:16 managed-node2 
/usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using transient store: false\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Initializing event backend file\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" 
level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458)\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Shutting down engines\"\nJul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered disabled state\nJul 19 12:38:16 managed-node2 kernel: device vetha780888b left promiscuous mode\nJul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered disabled state\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c)\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Shutting down engines\"\nJul 19 12:38:16 managed-node2 podman[27216]: Pods stopped:\nJul 19 12:38:16 managed-node2 podman[27216]: 0c3499cd78df9c3cbd4aa0aa8e91445278eb6157f70949cc299bc9c609a42f3c\nJul 19 12:38:16 managed-node2 podman[27216]: Pods removed:\nJul 19 12:38:16 managed-node2 podman[27216]: 0c3499cd78df9c3cbd4aa0aa8e91445278eb6157f70949cc299bc9c609a42f3c\nJul 19 12:38:16 managed-node2 podman[27216]: Secrets removed:\nJul 19 12:38:16 managed-node2 podman[27216]: Volumes removed:\nJul 19 12:38:16 managed-node2 systemd[25528]: Started rootless-netns-1ff27aec.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:16 managed-node2 kernel: IPv6: 
ADDRCONF(NETDEV_UP): veth24653eaf: link is not ready\nJul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered blocking state\nJul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered disabled state\nJul 19 12:38:16 managed-node2 kernel: device veth24653eaf entered promiscuous mode\nJul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered blocking state\nJul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered forwarding state\nJul 19 12:38:16 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth24653eaf: link becomes ready\nJul 19 12:38:16 managed-node2 dnsmasq[27465]: listening on cni-podman1(#3): 10.89.0.1\nJul 19 12:38:16 managed-node2 dnsmasq[27467]: started, version 2.79 cachesize 150\nJul 19 12:38:16 managed-node2 dnsmasq[27467]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify\nJul 19 12:38:16 managed-node2 dnsmasq[27467]: using local addresses only for domain dns.podman\nJul 19 12:38:16 managed-node2 dnsmasq[27467]: reading /etc/resolv.conf\nJul 19 12:38:16 managed-node2 dnsmasq[27467]: using local addresses only for domain dns.podman\nJul 19 12:38:16 managed-node2 dnsmasq[27467]: using nameserver 10.0.2.3#53\nJul 19 12:38:16 managed-node2 dnsmasq[27467]: using nameserver 10.29.169.13#53\nJul 19 12:38:16 managed-node2 dnsmasq[27467]: using nameserver 10.29.170.12#53\nJul 19 12:38:16 managed-node2 dnsmasq[27467]: using nameserver 10.2.32.1#53\nJul 19 12:38:16 managed-node2 dnsmasq[27467]: read /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 19 12:38:17 managed-node2 podman[27216]: Pod:\nJul 19 12:38:17 managed-node2 podman[27216]: 2d1d21944d4ca561d0a237f029182068a925bb49513d0d920c7ad527b530a87a\nJul 19 12:38:17 managed-node2 podman[27216]: Container:\nJul 19 12:38:17 managed-node2 podman[27216]: fbdb7144dbaf3a0b80484872c9bcae1ed8f6a793661386bc91aa084464c69027\nJul 19 12:38:17 managed-node2 systemd[25528]: Started A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:17 managed-node2 sudo[27207]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:38:17 managed-node2 platform-python[27643]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 19 12:38:18 managed-node2 platform-python[27767]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:38:18 managed-node2 dnsmasq[27467]: listening on cni-podman1(#3): fe80::f826:e2ff:fec6:eea3%cni-podman1\nJul 19 12:38:19 managed-node2 platform-python[27892]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:20 managed-node2 platform-python[28016]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None 
seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:21 managed-node2 platform-python[28139]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:38:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:38:22 managed-node2 platform-python[28430]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:38:23 managed-node2 platform-python[28553]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:23 managed-node2 platform-python[28676]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 19 12:38:23 managed-node2 platform-python[28775]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752943103.1443834-16356-64021954424990/source _original_basename=tmp0hh2oj3u follow=False checksum=b06d991e561d2233cf906d852db9b578dc61ce26 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None\nJul 19 12:38:24 managed-node2 platform-python[28900]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:38:24 managed-node2 systemd[1]: 
Created slice cgroup machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice.\n-- Subject: Unit machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.3685] manager: (cni-podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3)\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.3718] manager: (vethf4165b4a): new Veth device (/org/freedesktop/NetworkManager/Devices/4)\nJul 19 12:38:24 managed-node2 systemd-udevd[28949]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.\nJul 19 12:38:24 managed-node2 systemd-udevd[28949]: Could not generate persistent MAC address for vethf4165b4a: No such file or directory\nJul 19 12:38:24 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vethf4165b4a: link is not ready\nJul 19 12:38:24 managed-node2 systemd-udevd[28948]: Using default interface naming scheme 'rhel-8.0'.\nJul 19 12:38:24 managed-node2 systemd-udevd[28948]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.\nJul 19 12:38:24 managed-node2 systemd-udevd[28948]: Could not generate persistent MAC address for cni-podman1: No such file or directory\nJul 19 12:38:24 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered blocking state\nJul 19 12:38:24 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered disabled state\nJul 19 12:38:24 managed-node2 kernel: device vethf4165b4a entered promiscuous mode\nJul 19 12:38:24 managed-node2 dbus-daemon[591]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.5' (uid=0 pid=661 comm=\"/usr/sbin/NetworkManager --no-daemon \" label=\"system_u:system_r:NetworkManager_t:s0\")\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4140] device (cni-podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', sys-iface-state: 'external')\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4145] device (cni-podman1): state change: unavailable -> disconnected (reason 'connection-assumed', sys-iface-state: 'external')\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4153] device (cni-podman1): Activation: starting connection 'cni-podman1' (288926ec-c137-47aa-80eb-b1812c1bfed2)\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4154] device (cni-podman1): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'external')\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4157] device (cni-podman1): state change: prepare -> config (reason 'none', sys-iface-state: 'external')\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4159] device (cni-podman1): state change: config -> ip-config (reason 'none', sys-iface-state: 'external')\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4160] device (cni-podman1): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'external')\nJul 19 12:38:24 managed-node2 systemd[1]: Starting Network Manager Script Dispatcher Service...\n-- Subject: Unit NetworkManager-dispatcher.service has begun 
start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit NetworkManager-dispatcher.service has begun starting up.\nJul 19 12:38:24 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready\nJul 19 12:38:24 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready\nJul 19 12:38:24 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethf4165b4a: link becomes ready\nJul 19 12:38:24 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered blocking state\nJul 19 12:38:24 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered forwarding state\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4643] device (vethf4165b4a): carrier: link connected\nJul 19 12:38:24 managed-node2 dbus-daemon[591]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher'\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4647] device (cni-podman1): carrier: link connected\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4663] device (cni-podman1): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'external')\nJul 19 12:38:24 managed-node2 systemd[1]: Started Network Manager Script Dispatcher Service.\n-- Subject: Unit NetworkManager-dispatcher.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit NetworkManager-dispatcher.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4665] device (cni-podman1): state change: secondaries -> activated (reason 'none', sys-iface-state: 'external')\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4669] device (cni-podman1): Activation: successful, device activated.\nJul 19 12:38:24 managed-node2 dnsmasq[29070]: listening on cni-podman1(#3): 10.89.0.1\nJul 19 12:38:24 managed-node2 dnsmasq[29074]: started, version 2.79 cachesize 150\nJul 19 12:38:24 managed-node2 dnsmasq[29074]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify\nJul 19 12:38:24 managed-node2 dnsmasq[29074]: using local addresses only for domain dns.podman\nJul 19 12:38:24 managed-node2 dnsmasq[29074]: reading /etc/resolv.conf\nJul 19 12:38:24 managed-node2 dnsmasq[29074]: using local addresses only for domain dns.podman\nJul 19 12:38:24 managed-node2 dnsmasq[29074]: using nameserver 10.29.169.13#53\nJul 19 12:38:24 managed-node2 dnsmasq[29074]: using nameserver 10.29.170.12#53\nJul 19 12:38:24 managed-node2 dnsmasq[29074]: using nameserver 10.2.32.1#53\nJul 19 12:38:24 managed-node2 dnsmasq[29074]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 19 12:38:24 managed-node2 systemd[1]: Started libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope.\n-- Subject: Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:24 managed-node2 conmon[29076]: conmon c67c85c8cb2aae9cc679 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach}\nJul 19 12:38:24 managed-node2 conmon[29076]: conmon c67c85c8cb2aae9cc679 : terminal_ctrl_fd: 13\nJul 19 12:38:24 
managed-node2 conmon[29076]: conmon c67c85c8cb2aae9cc679 : winsz read side: 17, winsz write side: 18\nJul 19 12:38:24 managed-node2 systemd[1]: Started libcontainer container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.\n-- Subject: Unit libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:24 managed-node2 conmon[29076]: conmon c67c85c8cb2aae9cc679 : container PID: 29087\nJul 19 12:38:24 managed-node2 systemd[1]: Started libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope.\n-- Subject: Unit libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:24 managed-node2 conmon[29098]: conmon add9038e274085c173b1 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach}\nJul 19 12:38:24 managed-node2 conmon[29098]: conmon add9038e274085c173b1 : terminal_ctrl_fd: 12\nJul 19 12:38:24 managed-node2 conmon[29098]: conmon add9038e274085c173b1 : winsz read side: 16, winsz write side: 17\nJul 19 12:38:24 managed-node2 systemd[1]: Started libcontainer container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.\n-- Subject: Unit libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:24 managed-node2 conmon[29098]: conmon add9038e274085c173b1 : container PID: 29108\nJul 19 12:38:24 managed-node2 platform-python[28900]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 19 12:38:24 managed-node2 platform-python[28900]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod:\n f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816\n Container:\n add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\n \nJul 19 12:38:24 managed-node2 platform-python[28900]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-19T12:38:24-04:00\" level=info msg=\"/usr/bin/podman filtering at log level debug\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Using static 
dir /var/lib/containers/storage/libpod\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\n time=\"2025-07-19T12:38:24-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Initializing event backend file\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-19 12:35:55.640649556 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n 
time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a)\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice for parent machine.slice and name libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a)\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Inspecting image fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Inspecting image fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"\n 
time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Inspecting image fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"setting container name f8000a88fe4a-infra\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Allocated lock 1 for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Cached value indicated that idmapped mounts for overlay are not supported\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Check for idmapped mounts support \"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created container \\\"c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Container \\\"c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\\\" has work directory \\\"/var/lib/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Container \\\"c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\\\" has run directory \\\"/run/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Pulling image quay.io/libpod/testimage:20210610 (policy: missing)\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n 
time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"parsed reference into 
\\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"adding container to pod httpd2\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"setting container name httpd2-httpd2\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=info msg=\"Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Adding mount /proc\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Adding mount /dev\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Adding mount /dev/pts\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Adding mount /dev/mqueue\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Adding mount /sys\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Adding mount /sys/fs/cgroup\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Allocated lock 2 for container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created container \\\"add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Container \\\"add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\\\" has work directory 
\\\"/var/lib/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Container \\\"add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\\\" has run directory \\\"/run/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Strongconnecting node c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Pushed c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c onto stack\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Finishing node c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c. Popped c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c off stack\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Strongconnecting node add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Pushed add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f onto stack\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Finishing node add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f. Popped add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f off stack\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/HXWSEHVDVE6HABOKZ6B2SSNLKD,upperdir=/var/lib/containers/storage/overlay/6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e/diff,workdir=/var/lib/containers/storage/overlay/6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e/work,nodev,metacopy=on,context=\\\"system_u:object_r:container_file_t:s0:c723,c1018\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Mounted container \\\"c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\\\" at \\\"/var/lib/containers/storage/overlay/6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e/merged\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created root filesystem for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c at /var/lib/containers/storage/overlay/6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e/merged\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Made network namespace at /run/netns/netns-f67fee73-2bbe-5ce9-31b0-8129b0eb7f47 for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"cni result for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:ee:2d:d5:97:9a:6b Sandbox:} {Name:vethf4165b4a Mac:d2:23:54:53:0f:5f Sandbox:} {Name:eth0 Mac:ea:eb:9c:fe:80:d8 Sandbox:/run/netns/netns-f67fee73-2bbe-5ce9-31b0-8129b0eb7f47}] [{Version:4 Interface:0xc0005a9428 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"/etc/system-fips does not exist on host, not mounting FIPS mode subscription\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Setting Cgroups for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c to 
machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice:libpod:c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Workdir \\\"/\\\" resolved to host path \\\"/var/lib/containers/storage/overlay/6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e/merged\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created OCI spec for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c at /var/lib/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata/config.json\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice for parent machine.slice and name libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c -u c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata -p /run/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata/pidfile -n f8000a88fe4a-infra --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c]\"\n 
time=\"2025-07-19T12:38:24-04:00\" level=info msg=\"Running conmon under slice machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice and unitName libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Received: 29087\"\n time=\"2025-07-19T12:38:24-04:00\" level=info msg=\"Got Conmon PID as 29076\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c in OCI runtime\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Adding nameserver(s) from network status of '[\\\"10.89.0.1\\\"]'\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Adding search domain(s) from network status of '[\\\"dns.podman\\\"]'\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Starting container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c with command [/catatonit -P]\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Started container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/HJUTFIUMULI3FBOA3A6VGXTPPL,upperdir=/var/lib/containers/storage/overlay/ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45/diff,workdir=/var/lib/containers/storage/overlay/ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45/work,nodev,metacopy=on,context=\\\"system_u:object_r:container_file_t:s0:c723,c1018\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Mounted container \\\"add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\\\" at \\\"/var/lib/containers/storage/overlay/ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45/merged\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created root filesystem for container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f at /var/lib/containers/storage/overlay/ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45/merged\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"/etc/system-fips does not exist on host, not mounting FIPS mode subscription\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Setting Cgroups for container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f to machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice:libpod:add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Workdir \\\"/var/www\\\" resolved to a volume or mount\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created OCI spec for container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f at /var/lib/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata/config.json\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice for parent machine.slice and name libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created cgroup 
machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f -u add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata -p /run/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f]\"\n time=\"2025-07-19T12:38:24-04:00\" level=info msg=\"Running conmon under slice machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice and unitName libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Received: 29108\"\n time=\"2025-07-19T12:38:24-04:00\" level=info msg=\"Got Conmon PID as 29098\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f in OCI runtime\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Starting container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f with command [/bin/busybox-extras httpd -f -p 80]\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Started container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Shutting down 
engines\"\nJul 19 12:38:24 managed-node2 platform-python[28900]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 19 12:38:25 managed-node2 platform-python[29239]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None\nJul 19 12:38:25 managed-node2 systemd[1]: Reloading.\nJul 19 12:38:25 managed-node2 dnsmasq[29074]: listening on cni-podman1(#3): fe80::ec2d:d5ff:fe97:9a6b%cni-podman1\nJul 19 12:38:26 managed-node2 platform-python[29400]: ansible-systemd Invoked with name= scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None\nJul 19 12:38:26 managed-node2 systemd[1]: Reloading.\nJul 19 12:38:26 managed-node2 platform-python[29563]: ansible-systemd Invoked with name= scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None\nJul 19 12:38:26 managed-node2 systemd[1]: Created slice system-podman\\x2dkube.slice.\n-- Subject: Unit system-podman\\x2dkube.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit system-podman\\x2dkube.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:26 managed-node2 systemd[1]: Starting A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun starting up.\nJul 19 12:38:26 managed-node2 conmon[29076]: conmon c67c85c8cb2aae9cc679 : container 29087 exited with status 137\nJul 19 12:38:26 managed-node2 systemd[1]: libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has successfully entered the 'dead' state.\nJul 19 12:38:26 managed-node2 systemd[1]: libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope: Consumed 32ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope completed and consumed the indicated resources.\nJul 19 12:38:26 managed-node2 conmon[29098]: conmon add9038e274085c173b1 : container 29108 exited with status 137\nJul 19 12:38:26 managed-node2 systemd[1]: libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has successfully entered the 'dead' state.\nJul 19 12:38:26 managed-node2 systemd[1]: libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope: Consumed 33ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope completed and consumed the indicated resources.\nJul 19 12:38:26 managed-node2 
/usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c)\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f)\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using transient store: false\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: 
time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Initializing event backend file\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\nJul 19 12:38:26 managed-node2 
/usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using transient store: false\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Initializing event backend file\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using OCI runtime 
\\\"/usr/bin/runc\\\"\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 19 12:38:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay-ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45-merged.mount has successfully entered the 'dead' state.\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f)\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Shutting down engines\"\nJul 19 12:38:26 managed-node2 systemd[1]: libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has successfully entered the 'dead' state.\nJul 19 12:38:26 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered disabled state\nJul 19 12:38:26 managed-node2 kernel: device vethf4165b4a left promiscuous mode\nJul 19 12:38:26 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered disabled state\nJul 19 12:38:26 managed-node2 systemd[1]: run-netns-netns\\x2df67fee73\\x2d2bbe\\x2d5ce9\\x2d31b0\\x2d8129b0eb7f47.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-netns-netns\\x2df67fee73\\x2d2bbe\\x2d5ce9\\x2d31b0\\x2d8129b0eb7f47.mount has successfully entered the 'dead' state.\nJul 19 12:38:27 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c-userdata-shm.mount has successfully entered the 'dead' state.\nJul 19 12:38:27 managed-node2 systemd[1]: var-lib-containers-storage-overlay-6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e-merged.mount has successfully entered the 'dead' state.\nJul 19 12:38:27 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:27-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage 
--log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c)\"\nJul 19 12:38:27 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:27-04:00\" level=debug msg=\"Shutting down engines\"\nJul 19 12:38:27 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:38:27 managed-node2 systemd[1]: Stopping libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope.\n-- Subject: Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has begun shutting down.\nJul 19 12:38:27 managed-node2 systemd[1]: libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has successfully entered the 'dead' state.\nJul 19 12:38:27 managed-node2 systemd[1]: Stopped libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope.\n-- Subject: Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished shutting down.\nJul 19 12:38:27 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice.\n-- Subject: Unit machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice has finished shutting down.\nJul 19 12:38:27 managed-node2 systemd[1]: machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice: Consumed 198ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice completed and consumed the indicated resources.\nJul 19 12:38:27 managed-node2 podman[29570]: Pods stopped:\nJul 19 12:38:27 managed-node2 podman[29570]: f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816\nJul 19 12:38:27 managed-node2 podman[29570]: Pods removed:\nJul 19 12:38:27 managed-node2 podman[29570]: f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816\nJul 19 12:38:27 managed-node2 podman[29570]: Secrets removed:\nJul 19 12:38:27 managed-node2 podman[29570]: Volumes removed:\nJul 19 
12:38:27 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice.\n-- Subject: Unit machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:27 managed-node2 systemd[1]: Started libcontainer container 4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.\n-- Subject: Unit libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vetha38befe0: link is not ready\nJul 19 12:38:27 managed-node2 systemd-udevd[29728]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.\nJul 19 12:38:27 managed-node2 systemd-udevd[29728]: Could not generate persistent MAC address for vetha38befe0: No such file or directory\nJul 19 12:38:27 managed-node2 NetworkManager[661]: <info>  [1752943107.3392] manager: (vetha38befe0): new Veth device (/org/freedesktop/NetworkManager/Devices/5)\nJul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered blocking state\nJul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered disabled state\nJul 19 12:38:27 managed-node2 kernel: device vetha38befe0 entered promiscuous mode\nJul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered blocking state\nJul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered forwarding state\nJul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered disabled state\nJul 19 12:38:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vetha38befe0: link becomes ready\nJul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered blocking state\nJul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered forwarding state\nJul 19 12:38:27 managed-node2 NetworkManager[661]: <info>  [1752943107.3666] device (vetha38befe0): carrier: link connected\nJul 19 12:38:27 managed-node2 NetworkManager[661]: <info>  [1752943107.3670] device (cni-podman1): carrier: link connected\nJul 19 12:38:27 managed-node2 dnsmasq[29798]: listening on cni-podman1(#3): 10.89.0.1\nJul 19 12:38:27 managed-node2 dnsmasq[29798]: listening on cni-podman1(#3): fe80::ec2d:d5ff:fe97:9a6b%cni-podman1\nJul 19 12:38:27 managed-node2 dnsmasq[29802]: started, version 2.79 cachesize 150\nJul 19 12:38:27 managed-node2 dnsmasq[29802]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify\nJul 19 12:38:27 managed-node2 dnsmasq[29802]: using local addresses only for domain dns.podman\nJul 19 12:38:27 managed-node2 dnsmasq[29802]: reading /etc/resolv.conf\nJul 19 12:38:27 managed-node2 dnsmasq[29802]: using local addresses only for domain dns.podman\nJul 19 12:38:27 managed-node2 dnsmasq[29802]: using nameserver 10.29.169.13#53\nJul 19 12:38:27 managed-node2 dnsmasq[29802]: using nameserver 10.29.170.12#53\nJul 19 12:38:27 managed-node2 
dnsmasq[29802]: using nameserver 10.2.32.1#53\nJul 19 12:38:27 managed-node2 dnsmasq[29802]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 19 12:38:27 managed-node2 systemd[1]: Started libcontainer container 64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.\n-- Subject: Unit libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:27 managed-node2 systemd[1]: Started libcontainer container d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.\n-- Subject: Unit libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:27 managed-node2 podman[29570]: Pod:\nJul 19 12:38:27 managed-node2 podman[29570]: 63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7\nJul 19 12:38:27 managed-node2 podman[29570]: Container:\nJul 19 12:38:27 managed-node2 podman[29570]: d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425\nJul 19 12:38:27 managed-node2 systemd[1]: Started A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:28 managed-node2 platform-python[29967]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:38:29 managed-node2 platform-python[30100]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:30 managed-node2 platform-python[30224]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:31 managed-node2 platform-python[30347]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:32 managed-node2 platform-python[30636]: 
ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:38:32 managed-node2 platform-python[30759]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:33 managed-node2 platform-python[30882]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 19 12:38:33 managed-node2 platform-python[30981]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752943113.1342852-16787-124035634200669/source _original_basename=tmpprx4cnlk follow=False checksum=6f620a32a353317135005413ecc9cbab44a8759d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None\nJul 19 12:38:34 managed-node2 platform-python[31106]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:38:34 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice.\n-- Subject: Unit machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:34 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vethca04a2e2: link is not ready\nJul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered blocking state\nJul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered disabled state\nJul 19 12:38:34 managed-node2 kernel: device vethca04a2e2 entered promiscuous mode\nJul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered blocking state\nJul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered forwarding state\nJul 19 12:38:34 managed-node2 NetworkManager[661]: <info>  [1752943114.3363] manager: (vethca04a2e2): new Veth device (/org/freedesktop/NetworkManager/Devices/6)\nJul 19 12:38:34 managed-node2 systemd-udevd[31156]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.\nJul 19 12:38:34 managed-node2 
systemd-udevd[31156]: Could not generate persistent MAC address for vethca04a2e2: No such file or directory\nJul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered disabled state\nJul 19 12:38:34 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready\nJul 19 12:38:34 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready\nJul 19 12:38:34 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethca04a2e2: link becomes ready\nJul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered blocking state\nJul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered forwarding state\nJul 19 12:38:34 managed-node2 NetworkManager[661]: <info>  [1752943114.3654] device (vethca04a2e2): carrier: link connected\nJul 19 12:38:34 managed-node2 dnsmasq[29802]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 2 addresses\nJul 19 12:38:34 managed-node2 systemd[1]: Started libpod-conmon-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope.\n-- Subject: Unit libpod-conmon-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:34 managed-node2 systemd[1]: Started libcontainer container 9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.\n-- Subject: Unit libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:34 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 19 12:38:34 managed-node2 systemd[1]: Started libpod-conmon-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope.\n-- Subject: Unit libpod-conmon-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:34 managed-node2 systemd[1]: Started libcontainer container 798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.\n-- Subject: Unit libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:35 managed-node2 platform-python[31387]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None\nJul 19 12:38:35 managed-node2 systemd[1]: Reloading.\nJul 19 12:38:35 managed-node2 platform-python[31548]: ansible-systemd Invoked with name= scope=system 
enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None\nJul 19 12:38:35 managed-node2 systemd[1]: Reloading.\nJul 19 12:38:36 managed-node2 platform-python[31703]: ansible-systemd Invoked with name= scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None\nJul 19 12:38:36 managed-node2 systemd[1]: Starting A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun starting up.\nJul 19 12:38:36 managed-node2 systemd[1]: libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has successfully entered the 'dead' state.\nJul 19 12:38:36 managed-node2 systemd[1]: libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope: Consumed 34ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope completed and consumed the indicated resources.\nJul 19 12:38:36 managed-node2 systemd[1]: libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has successfully entered the 'dead' state.\nJul 19 12:38:36 managed-node2 systemd[1]: libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope: Consumed 36ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope completed and consumed the indicated resources.\nJul 19 12:38:36 managed-node2 dnsmasq[29802]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 19 12:38:36 managed-node2 systemd[1]: var-lib-containers-storage-overlay-0e27a2de6423f234f5c5cc21592f99c374ae3f65ee2ffe512e2ea9260072c30b-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-0e27a2de6423f234f5c5cc21592f99c374ae3f65ee2ffe512e2ea9260072c30b-merged.mount has successfully entered the 'dead' state.\nJul 19 12:38:36 managed-node2 systemd[1]: libpod-conmon-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-conmon-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has successfully entered the 'dead' state.\nJul 19 12:38:36 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered disabled state\nJul 19 12:38:36 managed-node2 kernel: device vethca04a2e2 left promiscuous mode\nJul 19 12:38:36 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) 
entered disabled state\nJul 19 12:38:36 managed-node2 systemd[1]: run-netns-netns\\x2dbc35cf78\\x2d8b29\\x2d812d\\x2d8688\\x2d6b3a472533c6.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-netns-netns\\x2dbc35cf78\\x2d8b29\\x2d812d\\x2d8688\\x2d6b3a472533c6.mount has successfully entered the 'dead' state.\nJul 19 12:38:36 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91-userdata-shm.mount has successfully entered the 'dead' state.\nJul 19 12:38:36 managed-node2 systemd[1]: var-lib-containers-storage-overlay-50218d54ec26584adc0c1ba212de5d0b7c4329564918e9762d222c23ddef0ca1-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-50218d54ec26584adc0c1ba212de5d0b7c4329564918e9762d222c23ddef0ca1-merged.mount has successfully entered the 'dead' state.\nJul 19 12:38:36 managed-node2 systemd[1]: libpod-conmon-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-conmon-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has successfully entered the 'dead' state.\nJul 19 12:38:36 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice.\n-- Subject: Unit machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice has finished shutting down.\nJul 19 12:38:36 managed-node2 systemd[1]: machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice: Consumed 192ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice completed and consumed the indicated resources.\nJul 19 12:38:36 managed-node2 podman[31710]: Pods stopped:\nJul 19 12:38:36 managed-node2 podman[31710]: 7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a\nJul 19 12:38:36 managed-node2 podman[31710]: Pods removed:\nJul 19 12:38:36 managed-node2 podman[31710]: 7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a\nJul 19 12:38:36 managed-node2 podman[31710]: Secrets removed:\nJul 19 12:38:36 managed-node2 podman[31710]: Volumes removed:\nJul 19 12:38:37 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice.\n-- Subject: Unit machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit 
machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:37 managed-node2 systemd[1]: Started libcontainer container c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.\n-- Subject: Unit libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:37 managed-node2 NetworkManager[661]: <info>  [1752943117.2271] manager: (vetha6d4d23e): new Veth device (/org/freedesktop/NetworkManager/Devices/7)\nJul 19 12:38:37 managed-node2 systemd-udevd[31876]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.\nJul 19 12:38:37 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vetha6d4d23e: link is not ready\nJul 19 12:38:37 managed-node2 systemd-udevd[31876]: Could not generate persistent MAC address for vetha6d4d23e: No such file or directory\nJul 19 12:38:37 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered blocking state\nJul 19 12:38:37 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered disabled state\nJul 19 12:38:37 managed-node2 kernel: device vetha6d4d23e entered promiscuous mode\nJul 19 12:38:37 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered blocking state\nJul 19 12:38:37 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered forwarding state\nJul 19 12:38:37 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vetha6d4d23e: link becomes ready\nJul 19 12:38:37 managed-node2 NetworkManager[661]: <info>  [1752943117.2430] device (vetha6d4d23e): carrier: link connected\nJul 19 12:38:37 managed-node2 dnsmasq[29802]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 2 addresses\nJul 19 12:38:37 managed-node2 systemd[1]: Started libcontainer container 8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.\n-- Subject: Unit libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:37 managed-node2 systemd[1]: Started libcontainer container bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.\n-- Subject: Unit libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:37 managed-node2 podman[31710]: Pod:\nJul 19 12:38:37 managed-node2 podman[31710]: 78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf\nJul 19 12:38:37 managed-node2 podman[31710]: Container:\nJul 19 12:38:37 managed-node2 podman[31710]: bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c\nJul 19 12:38:37 managed-node2 systemd[1]: Started A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished start-up\n-- Defined-By: systemd\n-- Support: 
https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:38 managed-node2 sudo[32108]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dlblsunulyjazdrjwbxpqfeiydnfqnsr ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943118.083963-17014-206060369573326/AnsiballZ_command.py'\nJul 19 12:38:38 managed-node2 sudo[32108]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:38:38 managed-node2 platform-python[32111]: ansible-command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:38 managed-node2 systemd[25528]: Started podman-32120.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:38 managed-node2 sudo[32108]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:38:38 managed-node2 platform-python[32258]: ansible-command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:39 managed-node2 platform-python[32389]: ansible-command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:39 managed-node2 sudo[32520]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-eeofqvmjjevxeutcqzjhxoeuxycorapq ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943119.4551659-17079-9632647473971/AnsiballZ_command.py'\nJul 19 12:38:39 managed-node2 sudo[32520]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:38:39 managed-node2 platform-python[32523]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:39 managed-node2 sudo[32520]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:38:40 managed-node2 platform-python[32649]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:40 managed-node2 platform-python[32775]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:41 managed-node2 platform-python[32901]: ansible-uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER 
client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:41 managed-node2 platform-python[33025]: ansible-uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:41 managed-node2 rsyslogd[1019]: imjournal: journal files changed, reloading... [v8.2102.0-15.el8 try https://www.rsyslog.com/e/0 ]\nJul 19 12:38:41 managed-node2 platform-python[33150]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr_s72m6vdk_podman/httpd1-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:42 managed-node2 platform-python[33274]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr_s72m6vdk_podman/httpd2-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:42 managed-node2 platform-python[33398]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr_s72m6vdk_podman/httpd3-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:45 managed-node2 platform-python[33647]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:46 managed-node2 platform-python[33775]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:38:49 managed-node2 platform-python[33900]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:38:52 managed-node2 platform-python[34023]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 19 12:38:52 managed-node2 platform-python[34150]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 19 12:38:53 managed-node2 platform-python[34277]: 
ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 19 12:38:55 managed-node2 platform-python[34400]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:38:58 managed-node2 platform-python[34523]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:39:01 managed-node2 platform-python[34646]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:39:03 managed-node2 platform-python[34769]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 19 12:39:05 managed-node2 platform-python[34930]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 19 12:39:06 managed-node2 platform-python[35053]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 19 12:39:10 managed-node2 platform-python[35176]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 19 12:39:11 managed-node2 platform-python[35300]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:11 managed-node2 platform-python[35425]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:12 managed-node2 platform-python[35549]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None 
executable=None creates=None removes=None stdin=None\nJul 19 12:39:13 managed-node2 platform-python[35673]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:14 managed-node2 platform-python[35797]: ansible-command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None\nJul 19 12:39:14 managed-node2 platform-python[35920]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:15 managed-node2 platform-python[36043]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:15 managed-node2 sudo[36166]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wbomjbspvcxjkqucdsequagxlcclxnab ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943155.3657928-18727-255969634455656/AnsiballZ_podman_image.py'\nJul 19 12:39:15 managed-node2 sudo[36166]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:39:15 managed-node2 systemd[25528]: Started podman-36171.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:39:15 managed-node2 systemd[25528]: Started podman-36179.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:39:15 managed-node2 systemd[25528]: Started podman-36187.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:39:15 managed-node2 systemd[25528]: Started podman-36195.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:39:15 managed-node2 systemd[25528]: Started podman-36203.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The 
start-up result is done.\nJul 19 12:39:16 managed-node2 systemd[25528]: Started podman-36211.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:39:16 managed-node2 sudo[36166]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:39:16 managed-node2 platform-python[36340]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:17 managed-node2 platform-python[36465]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:17 managed-node2 platform-python[36588]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 19 12:39:17 managed-node2 platform-python[36652]: ansible-file Invoked with owner=podman_basic_user group=3001 mode=0644 dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _original_basename=tmpvcvwgcl1 recurse=False state=file path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:18 managed-node2 sudo[36775]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mwgfktigaxpvesxpytdoaduxtkxpnkwn ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943158.1234493-18828-35756619876473/AnsiballZ_podman_play.py'\nJul 19 12:39:18 managed-node2 sudo[36775]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:39:18 managed-node2 platform-python[36778]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:39:18 managed-node2 systemd[25528]: Started podman-36786.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is 
done.\nJul 19 12:39:18 managed-node2 platform-python[36778]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 19 12:39:18 managed-node2 platform-python[36778]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: \nJul 19 12:39:18 managed-node2 platform-python[36778]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-19T12:39:18-04:00\" level=info msg=\"/bin/podman filtering at log level debug\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-19T12:39:18-04:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Initializing event backend file\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime 
krun: invalid argument\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\n time=\"2025-07-19T12:39:18-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-19 12:38:13.521272 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\"\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63)\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\"\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Got pod cgroup as /libpod_parent/1846ecb89dbdb057faef33ff14bed3ee782f5fffa65b2fd38248f39e0fe82c96\"\n Error: adding pod to state: name \"httpd1\" is in use: pod already exists\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Shutting down engines\"\nJul 19 12:39:18 managed-node2 platform-python[36778]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125\nJul 19 12:39:18 managed-node2 sudo[36775]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:39:19 managed-node2 platform-python[36940]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 19 12:39:20 managed-node2 platform-python[37064]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:21 managed-node2 platform-python[37189]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:22 managed-node2 platform-python[37313]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd2-create 
state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:22 managed-node2 platform-python[37436]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:23 managed-node2 platform-python[37727]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:24 managed-node2 platform-python[37852]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:24 managed-node2 platform-python[37975]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 19 12:39:24 managed-node2 platform-python[38039]: ansible-file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd2.yml _original_basename=tmpoct5ap5y recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd2.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:25 managed-node2 platform-python[38162]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:39:25 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice.\n-- Subject: Unit machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice has finished start-up\n-- Defined-By: systemd\n-- Support: 
https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:39:25 managed-node2 platform-python[38162]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 19 12:39:25 managed-node2 platform-python[38162]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: \nJul 19 12:39:25 managed-node2 platform-python[38162]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-19T12:39:25-04:00\" level=info msg=\"/usr/bin/podman filtering at log level debug\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-19T12:39:25-04:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\n time=\"2025-07-19T12:39:25-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Initializing event backend file\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Configured OCI 
runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\n time=\"2025-07-19T12:39:25-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-19 12:35:55.640649556 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\\\"\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a)\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\\\"\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice for parent machine.slice and name libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice\"\n Error: adding pod to state: name \"httpd2\" is in use: pod already exists\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Shutting down engines\"\nJul 19 12:39:25 managed-node2 platform-python[38162]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125\nJul 19 12:39:26 managed-node2 platform-python[38323]: ansible-stat Invoked 
with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:27 managed-node2 platform-python[38448]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:28 managed-node2 platform-python[38572]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:29 managed-node2 platform-python[38695]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:30 managed-node2 platform-python[38984]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:31 managed-node2 platform-python[39109]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:31 managed-node2 platform-python[39232]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 19 12:39:31 managed-node2 platform-python[39296]: ansible-file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd3.yml _original_basename=tmp8akhw61j recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd3.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:32 managed-node2 platform-python[39419]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None 
network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:39:32 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_cb3b0a5bd9971330489767c2081b22911aea01140e5cd90be87bd649d4122b98.slice.\n-- Subject: Unit machine-libpod_pod_cb3b0a5bd9971330489767c2081b22911aea01140e5cd90be87bd649d4122b98.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_cb3b0a5bd9971330489767c2081b22911aea01140e5cd90be87bd649d4122b98.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:39:33 managed-node2 sudo[39581]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hrgfwbfwhnrhozyrljgrwuftlafbcvgj ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943172.9515133-19598-262459141060618/AnsiballZ_command.py'\nJul 19 12:39:33 managed-node2 sudo[39581]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:39:33 managed-node2 platform-python[39584]: ansible-command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:33 managed-node2 systemd[25528]: Started podman-39593.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:39:33 managed-node2 sudo[39581]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:39:33 managed-node2 platform-python[39723]: ansible-command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:34 managed-node2 platform-python[39854]: ansible-command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:34 managed-node2 sudo[39985]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kapxhijpsmqpbluyhztflsgdqehfqzsf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943174.2658803-19637-151873258581255/AnsiballZ_command.py'\nJul 19 12:39:34 managed-node2 sudo[39985]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:39:34 managed-node2 platform-python[39988]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:34 managed-node2 sudo[39985]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:39:34 managed-node2 platform-python[40114]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:35 managed-node2 platform-python[40240]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True 
stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:35 managed-node2 platform-python[40366]: ansible-uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:36 managed-node2 platform-python[40490]: ansible-uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:36 managed-node2 platform-python[40614]: ansible-uri Invoked with url=http://localhost:15003/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:39 managed-node2 platform-python[40863]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:40 managed-node2 platform-python[40992]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:43 managed-node2 platform-python[41117]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 19 12:39:44 managed-node2 platform-python[41241]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:44 managed-node2 platform-python[41366]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:44 managed-node2 platform-python[41490]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True 
strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:45 managed-node2 platform-python[41614]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:46 managed-node2 platform-python[41738]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:46 managed-node2 sudo[41863]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bsbqhhxxcupnkafozyypbjqjdnfplilf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943186.6742442-20261-89535707061551/AnsiballZ_systemd.py'\nJul 19 12:39:46 managed-node2 sudo[41863]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:39:47 managed-node2 platform-python[41866]: ansible-systemd Invoked with name= scope=user state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 19 12:39:47 managed-node2 systemd[25528]: Reloading.\nJul 19 12:39:47 managed-node2 systemd[25528]: Stopping A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit UNIT has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has begun shutting down.\nJul 19 12:39:47 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered disabled state\nJul 19 12:39:47 managed-node2 kernel: device veth24653eaf left promiscuous mode\nJul 19 12:39:47 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered disabled state\nJul 19 12:39:47 managed-node2 podman[41882]: Pods stopped:\nJul 19 12:39:47 managed-node2 podman[41882]: 2d1d21944d4ca561d0a237f029182068a925bb49513d0d920c7ad527b530a87a\nJul 19 12:39:47 managed-node2 podman[41882]: Pods removed:\nJul 19 12:39:47 managed-node2 podman[41882]: 2d1d21944d4ca561d0a237f029182068a925bb49513d0d920c7ad527b530a87a\nJul 19 12:39:47 managed-node2 podman[41882]: Secrets removed:\nJul 19 12:39:47 managed-node2 podman[41882]: Volumes removed:\nJul 19 12:39:47 managed-node2 systemd[25528]: Stopped A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:39:47 managed-node2 sudo[41863]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:39:48 managed-node2 platform-python[42156]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:48 managed-node2 sudo[42281]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rkhrtubvxxvbakhpbdfuonkadzawohqm ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943188.1900835-20342-278852314562176/AnsiballZ_podman_play.py'\nJul 19 12:39:48 managed-node2 sudo[42281]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug 
kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 19 12:39:48 managed-node2 systemd[25528]: Started podman-42292.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman kube play --down /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped:\n Pods removed:\n Secrets removed:\n Volumes removed:\nJul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: \nJul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 19 12:39:48 managed-node2 sudo[42281]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:39:48 managed-node2 platform-python[42421]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:50 managed-node2 platform-python[42544]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 19 12:39:50 managed-node2 platform-python[42668]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:51 managed-node2 platform-python[42793]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:52 managed-node2 platform-python[42917]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 19 12:39:52 managed-node2 systemd[1]: Reloading.\nJul 19 12:39:52 managed-node2 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun 
shutting down.\nJul 19 12:39:52 managed-node2 systemd[1]: libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope has successfully entered the 'dead' state.\nJul 19 12:39:52 managed-node2 systemd[1]: libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope: Consumed 33ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope completed and consumed the indicated resources.\nJul 19 12:39:52 managed-node2 systemd[1]: libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope has successfully entered the 'dead' state.\nJul 19 12:39:52 managed-node2 systemd[1]: libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope: Consumed 33ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope completed and consumed the indicated resources.\nJul 19 12:39:52 managed-node2 dnsmasq[29802]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 19 12:39:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay-1ac1165dcb590ce00bffba4600c63f5cfb3b70afb8f380b4edeace6635fcdfe3-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-1ac1165dcb590ce00bffba4600c63f5cfb3b70afb8f380b4edeace6635fcdfe3-merged.mount has successfully entered the 'dead' state.\nJul 19 12:39:52 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered disabled state\nJul 19 12:39:52 managed-node2 kernel: device vetha38befe0 left promiscuous mode\nJul 19 12:39:52 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered disabled state\nJul 19 12:39:52 managed-node2 systemd[1]: run-netns-netns\\x2d85ef15c4\\x2d2df7\\x2d918e\\x2d907f\\x2dc88b265faa98.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-netns-netns\\x2d85ef15c4\\x2d2df7\\x2d918e\\x2d907f\\x2dc88b265faa98.mount has successfully entered the 'dead' state.\nJul 19 12:39:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3-userdata-shm.mount has successfully entered the 'dead' state.\nJul 19 12:39:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay-38daa2f903ec0433792b188cb05d307d74de74874667479598255b129c8e533b-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The 
unit var-lib-containers-storage-overlay-38daa2f903ec0433792b188cb05d307d74de74874667479598255b129c8e533b-merged.mount has successfully entered the 'dead' state.\nJul 19 12:39:52 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice.\n-- Subject: Unit machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice has finished shutting down.\nJul 19 12:39:52 managed-node2 systemd[1]: machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice: Consumed 66ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice completed and consumed the indicated resources.\nJul 19 12:39:52 managed-node2 podman[42953]: Pods stopped:\nJul 19 12:39:52 managed-node2 podman[42953]: 63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7\nJul 19 12:39:52 managed-node2 podman[42953]: Pods removed:\nJul 19 12:39:52 managed-node2 podman[42953]: 63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7\nJul 19 12:39:52 managed-node2 podman[42953]: Secrets removed:\nJul 19 12:39:52 managed-node2 podman[42953]: Volumes removed:\nJul 19 12:39:52 managed-node2 systemd[1]: libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope has successfully entered the 'dead' state.\nJul 19 12:39:52 managed-node2 systemd[1]: libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope: Consumed 34ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope completed and consumed the indicated resources.\nJul 19 12:39:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a-userdata-shm.mount has successfully entered the 'dead' state.\nJul 19 12:39:53 managed-node2 dnsmasq[29802]: exiting on receipt of SIGTERM\nJul 19 12:39:53 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has successfully entered the 'dead' state.\nJul 19 12:39:53 managed-node2 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- 
\n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished shutting down.\nJul 19 12:39:53 managed-node2 platform-python[43230]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay-946e50296936b22c6a0cd6493841882848a8040824e6c32355272e3fbcd82469-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-946e50296936b22c6a0cd6493841882848a8040824e6c32355272e3fbcd82469-merged.mount has successfully entered the 'dead' state.\nJul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman kube play --down /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped:\n Pods removed:\n Secrets removed:\n Volumes removed:\nJul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: \nJul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 19 12:39:54 managed-node2 platform-python[43492]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:55 managed-node2 platform-python[43615]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:56 managed-node2 platform-python[43740]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:57 managed-node2 platform-python[43864]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 19 12:39:57 managed-node2 systemd[1]: Reloading.\nJul 19 12:39:57 managed-node2 systemd[1]: Stopping A template for running K8s workloads via 
podman-kube-play...\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun shutting down.\nJul 19 12:39:57 managed-node2 systemd[1]: libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope has successfully entered the 'dead' state.\nJul 19 12:39:57 managed-node2 systemd[1]: libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope: Consumed 33ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope completed and consumed the indicated resources.\nJul 19 12:39:57 managed-node2 systemd[1]: libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope has successfully entered the 'dead' state.\nJul 19 12:39:57 managed-node2 systemd[1]: libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope: Consumed 33ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope completed and consumed the indicated resources.\nJul 19 12:39:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay-e4d65582a94e2bdc8a1eaad3e5573271c39b373e604383029d5c678d2ac244a1-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-e4d65582a94e2bdc8a1eaad3e5573271c39b373e604383029d5c678d2ac244a1-merged.mount has successfully entered the 'dead' state.\nJul 19 12:39:57 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered disabled state\nJul 19 12:39:57 managed-node2 kernel: device vetha6d4d23e left promiscuous mode\nJul 19 12:39:57 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered disabled state\nJul 19 12:39:57 managed-node2 systemd[1]: run-netns-netns\\x2d4c7240ed\\x2da995\\x2deb80\\x2d20f7\\x2d420676ba3f43.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-netns-netns\\x2d4c7240ed\\x2da995\\x2deb80\\x2d20f7\\x2d420676ba3f43.mount has successfully entered the 'dead' state.\nJul 19 12:39:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6-userdata-shm.mount has successfully entered the 'dead' state.\nJul 19 12:39:57 managed-node2 systemd[1]: 
var-lib-containers-storage-overlay-9dcce2629a28829a7d47e45b786a91b8326ac6500c27209769c1539d9f082e74-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-9dcce2629a28829a7d47e45b786a91b8326ac6500c27209769c1539d9f082e74-merged.mount has successfully entered the 'dead' state.\nJul 19 12:39:57 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice.\n-- Subject: Unit machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice has finished shutting down.\nJul 19 12:39:57 managed-node2 systemd[1]: machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice: Consumed 67ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice completed and consumed the indicated resources.\nJul 19 12:39:58 managed-node2 podman[43900]: Pods stopped:\nJul 19 12:39:58 managed-node2 podman[43900]: 78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf\nJul 19 12:39:58 managed-node2 podman[43900]: Pods removed:\nJul 19 12:39:58 managed-node2 podman[43900]: 78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf\nJul 19 12:39:58 managed-node2 podman[43900]: Secrets removed:\nJul 19 12:39:58 managed-node2 podman[43900]: Volumes removed:\nJul 19 12:39:58 managed-node2 systemd[1]: libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope has successfully entered the 'dead' state.\nJul 19 12:39:58 managed-node2 systemd[1]: libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope: Consumed 33ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope completed and consumed the indicated resources.\nJul 19 12:39:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c-userdata-shm.mount has successfully entered the 'dead' state.\nJul 19 12:39:58 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has successfully entered the 'dead' state.\nJul 19 12:39:58 managed-node2 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit 
podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished shutting down.\nJul 19 12:39:58 managed-node2 platform-python[44169]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay-c944fbd43e1673bcb0e2412bde6d753cffca05c01ef505aa29441df09b37e4f0-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-c944fbd43e1673bcb0e2412bde6d753cffca05c01ef505aa29441df09b37e4f0-merged.mount has successfully entered the 'dead' state.\nJul 19 12:39:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:39:58 managed-node2 platform-python[44294]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:39:58 managed-node2 platform-python[44294]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/httpd3.yml\nJul 19 12:39:59 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:39:59 managed-node2 platform-python[44430]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:40:00 managed-node2 platform-python[44553]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None\nJul 19 12:40:01 managed-node2 platform-python[44677]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:01 managed-node2 sudo[44802]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zjfnlsrhavffrrytzlezjdprwiglzjsy ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python 
/var/tmp/ansible-tmp-1752943201.3476377-20989-80415109164671/AnsiballZ_podman_container_info.py'\nJul 19 12:40:01 managed-node2 sudo[44802]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:40:01 managed-node2 platform-python[44805]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None\nJul 19 12:40:01 managed-node2 systemd[25528]: Started podman-44807.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:40:01 managed-node2 sudo[44802]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:40:02 managed-node2 sudo[44936]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kqzzrdenthgaimpshaehnjtpvgbskyzt ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943201.9272537-21012-43022464511697/AnsiballZ_command.py'\nJul 19 12:40:02 managed-node2 sudo[44936]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:40:02 managed-node2 platform-python[44939]: ansible-command Invoked with _raw_params=podman network ls -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:02 managed-node2 systemd[25528]: Started podman-44941.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:40:02 managed-node2 sudo[44936]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:40:02 managed-node2 sudo[45096]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wfpjnxfzctwnxksvtryonkmxvjvkhduq ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943202.41182-21042-156572471435781/AnsiballZ_command.py'\nJul 19 12:40:02 managed-node2 sudo[45096]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:40:02 managed-node2 platform-python[45099]: ansible-command Invoked with _raw_params=podman secret ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:02 managed-node2 systemd[25528]: Started podman-45101.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:40:02 managed-node2 sudo[45096]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:40:03 managed-node2 platform-python[45230]: ansible-command Invoked with removes=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl disable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None\nJul 19 12:40:03 managed-node2 systemd[1]: Stopping User Manager for UID 3001...\n-- Subject: Unit user@3001.service has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user@3001.service has begun shutting down.\nJul 19 12:40:03 managed-node2 
systemd[25528]: Stopped target Default.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Stopping D-Bus User Message Bus...\n-- Subject: Unit UNIT has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has begun shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Removed slice podman\\x2dkube.slice.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Stopping podman-pause-1458d7a0.scope.\n-- Subject: Unit UNIT has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has begun shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Stopped D-Bus User Message Bus.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Stopped target Basic System.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Stopped target Timers.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Stopped Mark boot as successful after the user session has run 2 minutes.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Stopped target Sockets.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Stopped target Paths.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Closed D-Bus User Message Bus Socket.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Stopped podman-pause-1458d7a0.scope.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Removed slice user.slice.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Reached target Shutdown.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:40:03 managed-node2 systemd[25528]: Started Exit the Session.\n-- Subject: Unit 
UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:40:03 managed-node2 systemd[25528]: Reached target Exit the Session.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:40:03 managed-node2 systemd[1]: user@3001.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit user@3001.service has successfully entered the 'dead' state.\nJul 19 12:40:03 managed-node2 systemd[1]: Stopped User Manager for UID 3001.\n-- Subject: Unit user@3001.service has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user@3001.service has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[1]: Stopping User runtime directory /run/user/3001...\n-- Subject: Unit user-runtime-dir@3001.service has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-runtime-dir@3001.service has begun shutting down.\nJul 19 12:40:03 managed-node2 systemd[1]: run-user-3001.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-user-3001.mount has successfully entered the 'dead' state.\nJul 19 12:40:03 managed-node2 systemd[1]: user-runtime-dir@3001.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit user-runtime-dir@3001.service has successfully entered the 'dead' state.\nJul 19 12:40:03 managed-node2 systemd[1]: Stopped User runtime directory /run/user/3001.\n-- Subject: Unit user-runtime-dir@3001.service has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-runtime-dir@3001.service has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[1]: Removed slice User Slice of UID 3001.\n-- Subject: Unit user-3001.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-3001.slice has finished shutting down.\nJul 19 12:40:03 managed-node2 platform-python[45362]: ansible-command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:04 managed-node2 sudo[45486]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dlpvabpxjyeqjdweosdqhelprmnnraei ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943204.1405673-21146-194288526342265/AnsiballZ_command.py'\nJul 19 12:40:04 managed-node2 sudo[45486]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:40:04 managed-node2 platform-python[45489]: ansible-command Invoked with _raw_params=podman pod exists httpd1 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:04 managed-node2 sudo[45486]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:40:04 managed-node2 platform-python[45619]: 
ansible-command Invoked with _raw_params=podman pod exists httpd2 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:05 managed-node2 platform-python[45749]: ansible-command Invoked with _raw_params=podman pod exists httpd3 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:05 managed-node2 sudo[45879]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uhnqljkrbofbppqpfvsxbbfqaxcbzbnz ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943205.3022838-21198-91872805319796/AnsiballZ_command.py'\nJul 19 12:40:05 managed-node2 sudo[45879]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:40:05 managed-node2 platform-python[45882]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:05 managed-node2 sudo[45879]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:40:05 managed-node2 platform-python[46008]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:06 managed-node2 platform-python[46134]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:06 managed-node2 platform-python[46260]: ansible-stat Invoked with path=/var/lib/systemd/linger/podman_basic_user follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:09 managed-node2 platform-python[46508]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:10 managed-node2 platform-python[46637]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 19 12:40:11 managed-node2 platform-python[46761]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:13 managed-node2 platform-python[46886]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 19 12:40:14 managed-node2 platform-python[47010]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:14 managed-node2 platform-python[47135]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:15 managed-node2 platform-python[47259]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:16 managed-node2 
platform-python[47383]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:16 managed-node2 platform-python[47507]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:17 managed-node2 platform-python[47630]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:17 managed-node2 platform-python[47753]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:40:19 managed-node2 platform-python[47876]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 19 12:40:19 managed-node2 platform-python[48000]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:20 managed-node2 platform-python[48125]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:21 managed-node2 platform-python[48249]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 19 12:40:22 managed-node2 platform-python[48376]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:22 managed-node2 platform-python[48499]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:40:23 managed-node2 platform-python[48622]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:25 managed-node2 platform-python[48747]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:25 managed-node2 platform-python[48871]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None 
masked=None user=None\nJul 19 12:40:26 managed-node2 platform-python[48998]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:26 managed-node2 platform-python[49121]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:40:28 managed-node2 platform-python[49244]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None\nJul 19 12:40:28 managed-node2 platform-python[49368]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:29 managed-node2 platform-python[49491]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:40:29 managed-node2 platform-python[49614]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:40:32 managed-node2 platform-python[49776]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d\nJul 19 12:40:33 managed-node2 platform-python[49903]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:33 managed-node2 platform-python[50026]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:36 managed-node2 platform-python[50274]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:37 managed-node2 platform-python[50403]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 19 12:40:37 managed-node2 platform-python[50527]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True 
checksum_algorithm=sha1\nJul 19 12:40:41 managed-node2 platform-python[50691]: ansible-setup Invoked with gather_subset=['all'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d\nJul 19 12:40:44 managed-node2 platform-python[50843]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:45 managed-node2 platform-python[50966]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:47 managed-node2 platform-python[51214]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:48 managed-node2 platform-python[51343]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 19 12:40:48 managed-node2 platform-python[51467]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:54 managed-node2 platform-python[51631]: ansible-setup Invoked with gather_subset=['all'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d\nJul 19 12:40:54 managed-node2 platform-python[51783]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:55 managed-node2 platform-python[51906]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:56 managed-node2 platform-python[52030]: ansible-dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:40:59 managed-node2 platform-python[52158]: ansible-dnf Invoked with name=['certmonger'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:41:02 managed-node2 dbus-daemon[591]: [system] Reloaded configuration\nJul 19 12:41:02 managed-node2 dbus-daemon[591]: [system] Reloaded configuration\nJul 19 12:41:02 managed-node2 dbus-daemon[591]: [system] Reloaded configuration\nJul 19 12:41:02 managed-node2 dbus-daemon[591]: [system] Reloaded configuration\nJul 19 12:41:02 managed-node2 dbus-daemon[591]: [system] Reloaded configuration\nJul 19 12:41:02 managed-node2 systemd[1]: Reloading.\nJul 19 12:41:02 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.\n-- Subject: Unit 
run-r2a2b61b169e54534b6bc9888468488f1.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit run-r2a2b61b169e54534b6bc9888468488f1.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:41:02 managed-node2 systemd[1]: Starting man-db-cache-update.service...\n-- Subject: Unit man-db-cache-update.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit man-db-cache-update.service has begun starting up.\nJul 19 12:41:02 managed-node2 systemd[1]: Reloading.\nJul 19 12:41:03 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit man-db-cache-update.service has successfully entered the 'dead' state.\nJul 19 12:41:03 managed-node2 systemd[1]: Started man-db-cache-update.service.\n-- Subject: Unit man-db-cache-update.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit man-db-cache-update.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:41:03 managed-node2 systemd[1]: run-r2a2b61b169e54534b6bc9888468488f1.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-r2a2b61b169e54534b6bc9888468488f1.service has successfully entered the 'dead' state.\nJul 19 12:41:04 managed-node2 platform-python[52790]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:41:04 managed-node2 platform-python[52913]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:41:05 managed-node2 platform-python[53036]: ansible-systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 19 12:41:05 managed-node2 systemd[1]: Reloading.\nJul 19 12:41:05 managed-node2 systemd[1]: Starting dnf makecache...\n-- Subject: Unit dnf-makecache.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit dnf-makecache.service has begun starting up.\nJul 19 12:41:05 managed-node2 systemd[1]: Starting Certificate monitoring and PKI enrollment...\n-- Subject: Unit certmonger.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit certmonger.service has begun starting up.\nJul 19 12:41:05 managed-node2 systemd[1]: Started 
Certificate monitoring and PKI enrollment.\n-- Subject: Unit certmonger.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit certmonger.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:41:05 managed-node2 dnf[53070]: Failed determining last makecache time.\nJul 19 12:41:05 managed-node2 dnf[53070]: CentOS Stream 8 - AppStream 124 kB/s | 4.4 kB 00:00\nJul 19 12:41:05 managed-node2 dnf[53070]: CentOS Stream 8 - BaseOS 100 kB/s | 3.9 kB 00:00\nJul 19 12:41:06 managed-node2 dnf[53070]: CentOS Stream 8 - Extras 83 kB/s | 2.9 kB 00:00\nJul 19 12:41:06 managed-node2 platform-python[53233]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=#\n # Ansible managed\n #\n # system_role:certificate\n booted=True provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None\nJul 19 12:41:06 managed-node2 dnf[53070]: CentOS Stream 8 - Extras common packages 74 kB/s | 3.0 kB 00:00\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 dnf[53070]: CentOS Stream 8 - HighAvailability 36 kB/s | 3.9 kB 00:00\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 
2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53258]: Certificate in file \"/etc/pki/tls/certs/quadlet_demo.crt\" issued by CA and saved.\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 dnf[53070]: Beaker Client - RedHatEnterpriseLinux8 8.8 kB/s | 1.5 kB 00:00\nJul 19 12:41:06 managed-node2 dnf[53070]: Beaker harness 13 kB/s | 1.3 kB 00:00\nJul 19 12:41:06 managed-node2 dnf[53070]: Copr repo for beakerlib-libraries owned by bgon 13 kB/s | 1.8 kB 00:00\nJul 19 12:41:06 managed-node2 dnf[53070]: Extra Packages for Enterprise Linux 8 - x86_64 413 kB/s | 35 kB 00:00\nJul 19 12:41:06 managed-node2 platform-python[53383]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt\nJul 19 12:41:07 managed-node2 dnf[53070]: Extra Packages for Enterprise Linux 8 - x86_64 281 kB/s | 21 kB 00:00\nJul 19 12:41:07 managed-node2 platform-python[53508]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key\nJul 19 12:41:07 managed-node2 dnf[53070]: Extra Packages for Enterprise Linux 8 - x86_64 81 kB/s | 25 kB 00:00\nJul 19 12:41:07 managed-node2 dnf[53070]: Copr repo for qa-tools owned by lpol 33 kB/s | 1.8 kB 00:00\nJul 19 12:41:07 managed-node2 platform-python[53632]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt\nJul 19 12:41:08 managed-node2 platform-python[53755]: ansible-command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:41:08 managed-node2 certmonger[53073]: 2025-07-19 12:41:08 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:08 managed-node2 platform-python[53879]: ansible-file Invoked with 
path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:41:09 managed-node2 platform-python[54002]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:41:09 managed-node2 platform-python[54125]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:41:09 managed-node2 dnf[53070]: Metadata cache created.\nJul 19 12:41:10 managed-node2 systemd[1]: dnf-makecache.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit dnf-makecache.service has successfully entered the 'dead' state.\nJul 19 12:41:10 managed-node2 systemd[1]: Started dnf makecache.\n-- Subject: Unit dnf-makecache.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit dnf-makecache.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:41:10 managed-node2 platform-python[54249]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:41:10 managed-node2 platform-python[54372]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:41:13 managed-node2 platform-python[54620]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:41:14 managed-node2 platform-python[54749]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 19 12:41:14 managed-node2 platform-python[54873]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:41:16 managed-node2 platform-python[54998]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:41:17 managed-node2 platform-python[55121]: ansible-stat Invoked with 
path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:41:17 managed-node2 platform-python[55244]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:41:18 managed-node2 platform-python[55368]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:41:21 managed-node2 platform-python[55491]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 19 12:41:21 managed-node2 platform-python[55618]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 19 12:41:22 managed-node2 platform-python[55745]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 19 12:41:23 managed-node2 platform-python[55868]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 19 12:41:25 managed-node2 platform-python[55991]: ansible-command Invoked with _raw_params=journalctl -ex warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None", "task_name": "Dump journal", "task_path": "/tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:142" }, { "ansible_version": "2.9.27", "end_time": "2025-07-19T16:41:38.092796+00:00Z", "host": "managed-node2", "message": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "start_time": "2025-07-19T16:41:38.076469+00:00Z", "task_name": "Manage each secret", "task_path": "/tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41" }, { "ansible_version": "2.9.27", "delta": "0:00:00.026359", "end_time": "2025-07-19 12:41:39.138418", "host": "managed-node2", "message": "No message could be found", "rc": 0, "start_time": "2025-07-19 
12:41:39.112059", "stdout": "-- Logs begin at Sat 2025-07-19 12:30:11 EDT, end at Sat 2025-07-19 12:41:39 EDT. --\nJul 19 12:35:33 managed-node2 platform-python[13361]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:35:34 managed-node2 platform-python[13484]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:35:37 managed-node2 platform-python[13607]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:35:39 managed-node2 platform-python[13730]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:35:42 managed-node2 dbus-daemon[591]: [system] Reloaded configuration\nJul 19 12:35:42 managed-node2 dbus-daemon[591]: [system] Reloaded configuration\nJul 19 12:35:42 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.\n-- Subject: Unit run-r58006eb3d48a46a9a552c0899f8af7ac.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit run-r58006eb3d48a46a9a552c0899f8af7ac.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:35:42 managed-node2 systemd[1]: Starting man-db-cache-update.service...\n-- Subject: Unit man-db-cache-update.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit man-db-cache-update.service has begun starting up.\nJul 19 12:35:42 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit man-db-cache-update.service has successfully entered the 'dead' state.\nJul 19 12:35:42 managed-node2 systemd[1]: Started man-db-cache-update.service.\n-- Subject: Unit man-db-cache-update.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit man-db-cache-update.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:35:42 managed-node2 systemd[1]: run-r58006eb3d48a46a9a552c0899f8af7ac.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- 
Support: https://access.redhat.com/support\n-- \n-- The unit run-r58006eb3d48a46a9a552c0899f8af7ac.service has successfully entered the 'dead' state.\nJul 19 12:35:43 managed-node2 platform-python[14335]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 19 12:35:43 managed-node2 platform-python[14483]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:35:45 managed-node2 platform-python[14607]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 19 12:35:46 managed-node2 kernel: SELinux: Converting 460 SID table entries...\nJul 19 12:35:46 managed-node2 kernel: SELinux: policy capability network_peer_controls=1\nJul 19 12:35:46 managed-node2 kernel: SELinux: policy capability open_perms=1\nJul 19 12:35:46 managed-node2 kernel: SELinux: policy capability extended_socket_class=1\nJul 19 12:35:46 managed-node2 kernel: SELinux: policy capability always_check_network=0\nJul 19 12:35:46 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1\nJul 19 12:35:46 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1\nJul 19 12:35:46 managed-node2 dbus-daemon[591]: [system] Reloaded configuration\nJul 19 12:35:47 managed-node2 platform-python[14734]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 19 12:35:51 managed-node2 platform-python[14857]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:35:53 managed-node2 platform-python[14982]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:35:54 managed-node2 platform-python[15105]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:35:54 managed-node2 platform-python[15228]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 19 12:35:54 managed-node2 platform-python[15327]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752942954.3752043-9946-72044176595742/source _original_basename=tmpi7ylefvg follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None\nJul 19 12:35:55 managed-node2 platform-python[15452]: ansible-containers.podman.podman_play Invoked with 
state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:35:55 managed-node2 kernel: evm: overlay not supported\nJul 19 12:35:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\\x2dcheck103626253-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-metacopy\\x2dcheck103626253-merged.mount has successfully entered the 'dead' state.\nJul 19 12:35:55 managed-node2 systemd[1]: Created slice machine.slice.\n-- Subject: Unit machine.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:35:55 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice.\n-- Subject: Unit machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:35:56 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:36:00 managed-node2 platform-python[15778]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:36:01 managed-node2 platform-python[15907]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:36:04 managed-node2 platform-python[16032]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:36:07 managed-node2 platform-python[16155]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 19 12:36:08 managed-node2 platform-python[16282]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 19 12:36:09 managed-node2 platform-python[16409]: 
ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 19 12:36:10 managed-node2 platform-python[16532]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:36:13 managed-node2 platform-python[16655]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:36:16 managed-node2 platform-python[16778]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:36:19 managed-node2 platform-python[16901]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 19 12:36:21 managed-node2 platform-python[17049]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 19 12:36:22 managed-node2 platform-python[17172]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 19 12:36:26 managed-node2 platform-python[17295]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:36:28 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:36:28 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:36:29 managed-node2 platform-python[17558]: 
ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:36:29 managed-node2 platform-python[17681]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:36:29 managed-node2 platform-python[17804]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 19 12:36:30 managed-node2 platform-python[17903]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752942989.6065838-11409-115866441513393/source _original_basename=tmpinaqg9cl follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None\nJul 19 12:36:30 managed-node2 platform-python[18028]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:36:30 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice.\n-- Subject: Unit machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:36:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:36:34 managed-node2 platform-python[18315]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:36:35 managed-node2 platform-python[18444]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:36:37 managed-node2 platform-python[18569]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False 
autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:36:40 managed-node2 platform-python[18692]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 19 12:36:41 managed-node2 platform-python[18819]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 19 12:36:41 managed-node2 platform-python[18946]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 19 12:36:43 managed-node2 platform-python[19069]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:36:46 managed-node2 platform-python[19192]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:36:49 managed-node2 platform-python[19315]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:36:52 managed-node2 platform-python[19438]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 19 12:36:54 managed-node2 platform-python[19586]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 19 12:36:54 managed-node2 platform-python[19709]: 
ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 19 12:36:59 managed-node2 platform-python[19832]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:37:00 managed-node2 platform-python[19957]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:37:01 managed-node2 platform-python[20081]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 19 12:37:01 managed-node2 platform-python[20208]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:37:02 managed-node2 platform-python[20333]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:37:02 managed-node2 platform-python[20333]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/nopull.yml\nJul 19 12:37:02 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice.\n-- Subject: Unit machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice has finished shutting down.\nJul 19 12:37:02 managed-node2 systemd[1]: machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice: Consumed 0 CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_3d5c702ba103fedfcd19ae1b2e6991368b0fce21c6e8d478b3ecaab45f17ca2f.slice completed and consumed the indicated resources.\nJul 19 12:37:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:37:02 managed-node2 platform-python[20471]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:37:03 managed-node2 
platform-python[20594]: ansible-command Invoked with _raw_params=podman image prune -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:37:06 managed-node2 platform-python[20849]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:37:07 managed-node2 platform-python[20978]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:37:10 managed-node2 platform-python[21103]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:37:13 managed-node2 platform-python[21226]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 19 12:37:14 managed-node2 platform-python[21353]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 19 12:37:15 managed-node2 platform-python[21480]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 19 12:37:16 managed-node2 platform-python[21603]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:37:19 managed-node2 platform-python[21726]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:37:22 managed-node2 platform-python[21849]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False 
disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:37:25 managed-node2 platform-python[21972]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 19 12:37:27 managed-node2 platform-python[22120]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 19 12:37:28 managed-node2 platform-python[22243]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 19 12:37:32 managed-node2 platform-python[22366]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:37:34 managed-node2 platform-python[22491]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:37:34 managed-node2 platform-python[22615]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 19 12:37:35 managed-node2 platform-python[22742]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:37:35 managed-node2 platform-python[22867]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:37:35 managed-node2 platform-python[22867]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/bogus.yml\nJul 19 12:37:35 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice.\n-- Subject: Unit machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice has finished shutting down.\nJul 19 12:37:35 managed-node2 systemd[1]: machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice: Consumed 0 CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_a690f27e0188f858a3240ac85dd6b3186fca3e9e0f6449a15954346af6c407cd.slice completed and consumed the indicated resources.\nJul 19 12:37:35 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: 
Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:37:36 managed-node2 platform-python[23006]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:37:36 managed-node2 platform-python[23129]: ansible-command Invoked with _raw_params=podman image prune -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:37:40 managed-node2 platform-python[23384]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:37:41 managed-node2 platform-python[23513]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:37:44 managed-node2 platform-python[23638]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:37:47 managed-node2 platform-python[23761]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 19 12:37:47 managed-node2 platform-python[23888]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 19 12:37:48 managed-node2 platform-python[24015]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 19 12:37:50 managed-node2 platform-python[24138]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 
conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:37:52 managed-node2 platform-python[24261]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:37:55 managed-node2 platform-python[24384]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:37:58 managed-node2 platform-python[24507]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 19 12:38:00 managed-node2 platform-python[24655]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 19 12:38:01 managed-node2 platform-python[24778]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 19 12:38:05 managed-node2 platform-python[24901]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 19 12:38:06 managed-node2 platform-python[25025]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:38:06 managed-node2 platform-python[25150]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:06 managed-node2 platform-python[25274]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:08 managed-node2 platform-python[25398]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:08 managed-node2 platform-python[25522]: ansible-command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None\nJul 19 12:38:08 managed-node2 systemd[1]: Created slice User Slice of UID 3001.\n-- Subject: Unit user-3001.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-3001.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:08 managed-node2 systemd[1]: Starting User runtime directory 
/run/user/3001...\n-- Subject: Unit user-runtime-dir@3001.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-runtime-dir@3001.service has begun starting up.\nJul 19 12:38:08 managed-node2 systemd[1]: Started User runtime directory /run/user/3001.\n-- Subject: Unit user-runtime-dir@3001.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-runtime-dir@3001.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:08 managed-node2 systemd[1]: Starting User Manager for UID 3001...\n-- Subject: Unit user@3001.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user@3001.service has begun starting up.\nJul 19 12:38:08 managed-node2 systemd[25528]: pam_unix(systemd-user:session): session opened for user podman_basic_user by (uid=0)\nJul 19 12:38:08 managed-node2 systemd[25528]: Reached target Paths.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:08 managed-node2 systemd[25528]: Starting D-Bus User Message Bus Socket.\n-- Subject: Unit UNIT has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has begun starting up.\nJul 19 12:38:08 managed-node2 systemd[25528]: Started Mark boot as successful after the user session has run 2 minutes.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:08 managed-node2 systemd[25528]: Reached target Timers.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:08 managed-node2 systemd[25528]: Listening on D-Bus User Message Bus Socket.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:08 managed-node2 systemd[25528]: Reached target Sockets.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:08 managed-node2 systemd[25528]: Reached target Basic System.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:08 managed-node2 systemd[25528]: Reached target Default.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:08 managed-node2 systemd[25528]: Startup finished in 28ms.\n-- Subject: User manager start-up is now complete\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The user manager instance for user 3001 has been started. All services queued\n-- for starting have been started. 
Note that other services might still be starting\n-- up or be started at any later time.\n-- \n-- Startup of the manager took 28712 microseconds.\nJul 19 12:38:08 managed-node2 systemd[1]: Started User Manager for UID 3001.\n-- Subject: Unit user@3001.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user@3001.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:09 managed-node2 platform-python[25663]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:10 managed-node2 platform-python[25786]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:10 managed-node2 sudo[25909]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ziadsxoqzpztrsztgsdmjtfdqrqqbghq ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943090.202407-15734-110385486361950/AnsiballZ_podman_image.py'\nJul 19 12:38:10 managed-node2 sudo[25909]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:38:10 managed-node2 systemd[25528]: Started D-Bus User Message Bus.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:10 managed-node2 systemd[25528]: Created slice user.slice.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:10 managed-node2 systemd[25528]: Started podman-25921.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:10 managed-node2 systemd[25528]: Started podman-pause-1458d7a0.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:10 managed-node2 systemd[25528]: Started podman-25939.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:11 managed-node2 systemd[25528]: Started podman-25955.scope.\n-- Subject: Unit UNIT has finished start-up\n-- 
Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:11 managed-node2 sudo[25909]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:38:11 managed-node2 platform-python[26084]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:38:12 managed-node2 platform-python[26207]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:12 managed-node2 platform-python[26330]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 19 12:38:13 managed-node2 platform-python[26429]: ansible-copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752943092.4331284-15846-26644570363473/source _original_basename=tmp0dg28w0o follow=False checksum=fe0b16bd085957dfbf8e2496934305469d165478 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None\nJul 19 12:38:13 managed-node2 sudo[26554]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-louqgipewhnyaovmbewiqaddtljctvmr ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943093.148976-15887-61767546226259/AnsiballZ_podman_play.py'\nJul 19 12:38:13 managed-node2 sudo[26554]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:38:13 managed-node2 platform-python[26557]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:38:13 managed-node2 systemd[25528]: Started podman-26565.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:13 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6\nJul 19 12:38:13 managed-node2 systemd[25528]: Started rootless-netns-6ed4b4b3.scope.\n-- Subject: Unit UNIT has finished start-up\n-- 
Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:13 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this.\nJul 19 12:38:13 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vetha780888b: link is not ready\nJul 19 12:38:13 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered blocking state\nJul 19 12:38:13 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered disabled state\nJul 19 12:38:13 managed-node2 kernel: device vetha780888b entered promiscuous mode\nJul 19 12:38:13 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready\nJul 19 12:38:13 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready\nJul 19 12:38:13 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vetha780888b: link becomes ready\nJul 19 12:38:13 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered blocking state\nJul 19 12:38:13 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered forwarding state\nJul 19 12:38:14 managed-node2 dnsmasq[26752]: listening on cni-podman1(#3): 10.89.0.1\nJul 19 12:38:14 managed-node2 dnsmasq[26754]: started, version 2.79 cachesize 150\nJul 19 12:38:14 managed-node2 dnsmasq[26754]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify\nJul 19 12:38:14 managed-node2 dnsmasq[26754]: using local addresses only for domain dns.podman\nJul 19 12:38:14 managed-node2 dnsmasq[26754]: reading /etc/resolv.conf\nJul 19 12:38:14 managed-node2 dnsmasq[26754]: using local addresses only for domain dns.podman\nJul 19 12:38:14 managed-node2 dnsmasq[26754]: using nameserver 10.0.2.3#53\nJul 19 12:38:14 managed-node2 dnsmasq[26754]: using nameserver 10.29.169.13#53\nJul 19 12:38:14 managed-node2 dnsmasq[26754]: using nameserver 10.29.170.12#53\nJul 19 12:38:14 managed-node2 dnsmasq[26754]: using nameserver 10.2.32.1#53\nJul 19 12:38:14 managed-node2 dnsmasq[26754]: read /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 19 12:38:14 managed-node2 conmon[26767]: conmon f153d4517c8778d9470c : failed to write to /proc/self/oom_score_adj: Permission denied\nJul 19 12:38:14 managed-node2 conmon[26768]: conmon f153d4517c8778d9470c : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach}\nJul 19 12:38:14 managed-node2 conmon[26768]: conmon f153d4517c8778d9470c : terminal_ctrl_fd: 14\nJul 19 12:38:14 managed-node2 conmon[26768]: conmon f153d4517c8778d9470c : winsz read side: 17, winsz write side: 18\nJul 19 12:38:14 managed-node2 conmon[26768]: conmon f153d4517c8778d9470c : container PID: 26778\nJul 19 12:38:14 managed-node2 conmon[26788]: conmon a8773b3857e3e0dd4e13 : failed to write to /proc/self/oom_score_adj: Permission denied\nJul 19 12:38:14 managed-node2 conmon[26789]: conmon a8773b3857e3e0dd4e13 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach}\nJul 19 12:38:14 managed-node2 conmon[26789]: conmon a8773b3857e3e0dd4e13 : terminal_ctrl_fd: 13\nJul 19 12:38:14 managed-node2 conmon[26789]: conmon a8773b3857e3e0dd4e13 : winsz read side: 16, winsz write side: 17\nJul 19 12:38:14 managed-node2 conmon[26789]: conmon a8773b3857e3e0dd4e13 : container PID: 26799\nJul 19 12:38:14 managed-node2 platform-python[26557]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play 
kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 19 12:38:14 managed-node2 platform-python[26557]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod:\n 0c3499cd78df9c3cbd4aa0aa8e91445278eb6157f70949cc299bc9c609a42f3c\n Container:\n a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\n \nJul 19 12:38:14 managed-node2 platform-python[26557]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-19T12:38:13-04:00\" level=info msg=\"/bin/podman filtering at log level debug\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Initializing event backend file\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable 
found for OCI runtime kata: invalid argument\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Successfully loaded 1 networks\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"found free device name cni-podman1\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"found free ipv4 network subnet 10.89.0.0/24\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-19 12:38:13.521272 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"reference \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\" does not resolve to an image ID\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"reference \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\" does not resolve to an image ID\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"FROM \\\"scratch\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Cached value indicated that idmapped mounts for overlay are not supported\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Check for idmapped mounts support \"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"overlay: test mount indicated that volatile is being used\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"overlay: 
mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345/work,userxattr,volatile,context=\\\"system_u:object_r:container_file_t:s0:c480,c514\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Container ID: ac8e6c0ad9d62a1134f2644b1390fd8fa36d22d0d6282cefc7edd95b4f95d64d\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\\\"\\\", Src:[]string{\\\"/usr/libexec/podman/catatonit\\\"}, Dest:\\\"/catatonit\\\", Download:false, Chown:\\\"\\\", Chmod:\\\"\\\", Checksum:\\\"\\\", Files:[]imagebuilder.File(nil)}\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"added content file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]}\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"COMMIT localhost/podman-pause:4.9.4-dev-1708535009\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"COMMIT \\\"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"committing image with reference \\\"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\" is allowed by policy\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"layer list: [\\\"340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345\\\"]\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"using \\\"/var/tmp/buildah2427832820\\\" to hold temporary data\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Tar with options on /home/podman_basic_user/.local/share/containers/storage/overlay/340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345/diff\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"layer \\\"340addd39a0119ab1a39a6915aa4a3fafdf78511c642616c3fe04da09dcf3345\\\" size is 767488 bytes, uncompressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690, possibly-compressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"OCIv1 config = 
{\\\"created\\\":\\\"2025-07-19T16:38:13.656892898Z\\\",\\\"architecture\\\":\\\"amd64\\\",\\\"os\\\":\\\"linux\\\",\\\"config\\\":{\\\"Env\\\":[\\\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\\\"],\\\"Entrypoint\\\":[\\\"/catatonit\\\",\\\"-P\\\"],\\\"Labels\\\":{\\\"io.buildah.version\\\":\\\"1.33.5\\\"}},\\\"rootfs\\\":{\\\"type\\\":\\\"layers\\\",\\\"diff_ids\\\":[\\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\"]},\\\"history\\\":[{\\\"created\\\":\\\"2025-07-19T16:38:13.656345599Z\\\",\\\"created_by\\\":\\\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \\\",\\\"empty_layer\\\":true},{\\\"created\\\":\\\"2025-07-19T16:38:13.660597339Z\\\",\\\"created_by\\\":\\\"/bin/sh -c #(nop) ENTRYPOINT [\\\\\\\"/catatonit\\\\\\\", \\\\\\\"-P\\\\\\\"]\\\"}]}\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"OCIv1 manifest = {\\\"schemaVersion\\\":2,\\\"mediaType\\\":\\\"application/vnd.oci.image.manifest.v1+json\\\",\\\"config\\\":{\\\"mediaType\\\":\\\"application/vnd.oci.image.config.v1+json\\\",\\\"digest\\\":\\\"sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\",\\\"size\\\":668},\\\"layers\\\":[{\\\"mediaType\\\":\\\"application/vnd.oci.image.layer.v1.tar\\\",\\\"digest\\\":\\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\",\\\"size\\\":767488}],\\\"annotations\\\":{\\\"org.opencontainers.image.base.digest\\\":\\\"\\\",\\\"org.opencontainers.image.base.name\\\":\\\"\\\"}}\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Docker v2s2 config = {\\\"created\\\":\\\"2025-07-19T16:38:13.656892898Z\\\",\\\"container\\\":\\\"ac8e6c0ad9d62a1134f2644b1390fd8fa36d22d0d6282cefc7edd95b4f95d64d\\\",\\\"container_config\\\":{\\\"Hostname\\\":\\\"\\\",\\\"Domainname\\\":\\\"\\\",\\\"User\\\":\\\"\\\",\\\"AttachStdin\\\":false,\\\"AttachStdout\\\":false,\\\"AttachStderr\\\":false,\\\"Tty\\\":false,\\\"OpenStdin\\\":false,\\\"StdinOnce\\\":false,\\\"Env\\\":[\\\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\\\"],\\\"Cmd\\\":[],\\\"Image\\\":\\\"\\\",\\\"Volumes\\\":{},\\\"WorkingDir\\\":\\\"\\\",\\\"Entrypoint\\\":[\\\"/catatonit\\\",\\\"-P\\\"],\\\"OnBuild\\\":[],\\\"Labels\\\":{\\\"io.buildah.version\\\":\\\"1.33.5\\\"}},\\\"config\\\":{\\\"Hostname\\\":\\\"\\\",\\\"Domainname\\\":\\\"\\\",\\\"User\\\":\\\"\\\",\\\"AttachStdin\\\":false,\\\"AttachStdout\\\":false,\\\"AttachStderr\\\":false,\\\"Tty\\\":false,\\\"OpenStdin\\\":false,\\\"StdinOnce\\\":false,\\\"Env\\\":[\\\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\\\"],\\\"Cmd\\\":[],\\\"Image\\\":\\\"\\\",\\\"Volumes\\\":{},\\\"WorkingDir\\\":\\\"\\\",\\\"Entrypoint\\\":[\\\"/catatonit\\\",\\\"-P\\\"],\\\"OnBuild\\\":[],\\\"Labels\\\":{\\\"io.buildah.version\\\":\\\"1.33.5\\\"}},\\\"architecture\\\":\\\"amd64\\\",\\\"os\\\":\\\"linux\\\",\\\"rootfs\\\":{\\\"type\\\":\\\"layers\\\",\\\"diff_ids\\\":[\\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\"]},\\\"history\\\":[{\\\"created\\\":\\\"2025-07-19T16:38:13.656345599Z\\\",\\\"created_by\\\":\\\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \\\",\\\"empty_layer\\\":true},{\\\"created\\\":\\\"2025-07-19T16:38:13.660597339Z\\\",\\\"created_by\\\":\\\"/bin/sh -c #(nop) ENTRYPOINT [\\\\\\\"/catatonit\\\\\\\", \\\\\\\"-P\\\\\\\"]\\\"}]}\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Docker v2s2 
manifest = {\\\"schemaVersion\\\":2,\\\"mediaType\\\":\\\"application/vnd.docker.distribution.manifest.v2+json\\\",\\\"config\\\":{\\\"mediaType\\\":\\\"application/vnd.docker.container.image.v1+json\\\",\\\"size\\\":1342,\\\"digest\\\":\\\"sha256:803cd64c1bc1a2e7297b3d5f520a915c581e4037aabac925fb21fc3ad8b279ee\\\"},\\\"layers\\\":[{\\\"mediaType\\\":\\\"application/vnd.docker.image.rootfs.diff.tar\\\",\\\"size\\\":767488,\\\"digest\\\":\\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\"}]}\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"IsRunningImageAllowed for image containers-storage:\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\" Using transport \\\"containers-storage\\\" policy section \"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\" Requirement 0: allowed\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Overall: allowed\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"start reading config\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"finished reading config\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"... will first try using the original manifest unmodified\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Checking if we can reuse blob sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690: general substitution = true, compression for MIME type \\\"application/vnd.oci.image.layer.v1.tar\\\" = true\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"reading layer \\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"No compression detected\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using original blob without modification\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690/diff\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"finished reading layer \\\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"No compression detected\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Compression change for blob sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63 (\\\"application/vnd.oci.image.config.v1+json\\\") not supported\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Using original blob without modification\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"setting image creation date to 2025-07-19 16:38:13.656892898 +0000 UTC\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"created new image ID \\\"9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\" with metadata \\\"{}\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"added name \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" to image 
\\\"9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"printing final image id \\\"9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Got pod cgroup as /libpod_parent/0c3499cd78df9c3cbd4aa0aa8e91445278eb6157f70949cc299bc9c609a42f3c\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63)\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Inspecting image 9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Inspecting image 9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Inspecting image 9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"setting container name 0c3499cd78df-infra\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Allocated lock 1 for container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" 
level=debug msg=\"Created container \\\"f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Container \\\"f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\\\" has work directory \\\"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Container \\\"f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\\\" has run directory \\\"/run/user/3001/containers/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Pulling image quay.io/libpod/testimage:20210610 (policy: missing)\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" 
in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Inspecting image 
9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"adding container to pod httpd1\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"setting container name httpd1-httpd1\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=info msg=\"Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Adding mount /proc\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Adding mount /dev\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Adding mount /dev/pts\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Adding mount /dev/mqueue\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Adding mount /sys\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Adding mount /sys/fs/cgroup\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Allocated lock 2 for container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Created container \\\"a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Container \\\"a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\\\" has work directory \\\"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Container \\\"a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\\\" has run directory \\\"/run/user/3001/containers/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Strongconnecting node f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Pushed f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c onto stack\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Finishing node f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c. Popped f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c off stack\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Strongconnecting node a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Pushed a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 onto stack\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Finishing node a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458. 
Popped a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 off stack\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/T3ZNBLNG2W7D2UELJU7O7YZ76X,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/127ba02fe95229d135be77cff068ce7d066eb98b87e356cce8c34501e113568e/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/127ba02fe95229d135be77cff068ce7d066eb98b87e356cce8c34501e113568e/work,userxattr,context=\\\"system_u:object_r:container_file_t:s0:c330,c361\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Mounted container \\\"f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\\\" at \\\"/home/podman_basic_user/.local/share/containers/storage/overlay/127ba02fe95229d135be77cff068ce7d066eb98b87e356cce8c34501e113568e/merged\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Created root filesystem for container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c at /home/podman_basic_user/.local/share/containers/storage/overlay/127ba02fe95229d135be77cff068ce7d066eb98b87e356cce8c34501e113568e/merged\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"Made network namespace at /run/user/3001/netns/netns-06b3cc7d-4137-7077-edbe-bd7530bc2101 for container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"creating rootless network namespace with name \\\"rootless-netns-d22c9f230d0691b8f418\\\"\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"slirp4netns command: /bin/slirp4netns --disable-host-loopback --mtu=65520 --enable-sandbox --enable-seccomp --enable-ipv6 -c -r 3 --netns-type=path /run/user/3001/netns/rootless-netns-d22c9f230d0691b8f418 tap0\"\n time=\"2025-07-19T12:38:13-04:00\" level=debug msg=\"The path of /etc/resolv.conf in the mount ns is \\\"/etc/resolv.conf\\\"\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"cni result for container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:76:19:76:c8:78:b3 Sandbox:} {Name:vetha780888b Mac:f2:ee:6b:fd:41:a0 Sandbox:} {Name:eth0 Mac:2e:67:99:01:50:2a Sandbox:/run/user/3001/netns/netns-06b3cc7d-4137-7077-edbe-bd7530bc2101}] [{Version:4 Interface:0xc000c00b08 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-19T12:38:14-04:00\\\" level=info msg=\\\"Starting parent driver\\\"\\n\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-19T12:38:14-04:00\\\" level=info msg=\\\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport2421233428/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport2421233428/.bp.sock]\\\"\\n\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-19T12:38:14-04:00\\\" level=info msg=\\\"Starting child driver in child netns (\\\\\\\"/proc/self/exe\\\\\\\" [rootlessport-child])\\\"\\n\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-19T12:38:14-04:00\\\" level=info msg=\\\"Waiting for initComplete\\\"\\n\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-19T12:38:14-04:00\\\" level=info msg=\\\"initComplete is 
closed; parent and child established the communication channel\\\"\\n\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-19T12:38:14-04:00\\\" level=info msg=\\\"Exposing ports [{ 80 15001 1 tcp}]\\\"\\n\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"rootlessport is ready\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"rootlessport: time=\\\"2025-07-19T12:38:14-04:00\\\" level=info msg=Ready\\n\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"/etc/system-fips does not exist on host, not mounting FIPS mode subscription\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Workdir \\\"/\\\" resolved to host path \\\"/home/podman_basic_user/.local/share/containers/storage/overlay/127ba02fe95229d135be77cff068ce7d066eb98b87e356cce8c34501e113568e/merged\\\"\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Created OCI spec for container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata/config.json\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Got pod cgroup as \"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c -u f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata -p /run/user/3001/containers/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata/pidfile -n 0c3499cd78df-infra --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c]\"\n time=\"2025-07-19T12:38:14-04:00\" level=info msg=\"Failed 
to add conmon to cgroupfs sandbox cgroup: creating cgroup for blkio: mkdir /sys/fs/cgroup/blkio/libpod_parent: permission denied\"\n [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied\n \n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Received: 26778\"\n time=\"2025-07-19T12:38:14-04:00\" level=info msg=\"Got Conmon PID as 26768\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Created container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c in OCI runtime\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Adding nameserver(s) from network status of '[\\\"10.89.0.1\\\"]'\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Adding search domain(s) from network status of '[\\\"dns.podman\\\"]'\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Starting container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c with command [/catatonit -P]\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Started container f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/Q6SNT2SFVF32LFZYXFZFNM34JV,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/30ccda56ccc67908370dcb7e56c6e44595e70d6af5e179ee106b4a3d90b31e0f/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/30ccda56ccc67908370dcb7e56c6e44595e70d6af5e179ee106b4a3d90b31e0f/work,userxattr,context=\\\"system_u:object_r:container_file_t:s0:c330,c361\\\"\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Mounted container \\\"a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\\\" at \\\"/home/podman_basic_user/.local/share/containers/storage/overlay/30ccda56ccc67908370dcb7e56c6e44595e70d6af5e179ee106b4a3d90b31e0f/merged\\\"\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Created root filesystem for container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 at /home/podman_basic_user/.local/share/containers/storage/overlay/30ccda56ccc67908370dcb7e56c6e44595e70d6af5e179ee106b4a3d90b31e0f/merged\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"/etc/system-fips does not exist on host, not mounting FIPS mode subscription\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Workdir \\\"/var/www\\\" resolved to a volume or mount\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Created OCI spec for container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata/config.json\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Got pod cgroup as \"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 -u a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata -p 
/run/user/3001/containers/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458]\"\n time=\"2025-07-19T12:38:14-04:00\" level=info msg=\"Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for cpu: mkdir /sys/fs/cgroup/cpu/conmon: permission denied\"\n [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied\n \n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Received: 26799\"\n time=\"2025-07-19T12:38:14-04:00\" level=info msg=\"Got Conmon PID as 26789\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Created container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 in OCI runtime\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Starting container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458 with command [/bin/busybox-extras httpd -f -p 80]\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Started container a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)\"\n time=\"2025-07-19T12:38:14-04:00\" level=debug msg=\"Shutting down engines\"\nJul 19 12:38:14 managed-node2 platform-python[26557]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 19 12:38:14 managed-node2 sudo[26554]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:38:14 managed-node2 sudo[26930]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lmrmjlkkbsynluhmnwsuczlkvrvxmsws ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943094.4993763-15928-49906019321868/AnsiballZ_systemd.py'\nJul 19 12:38:14 managed-node2 sudo[26930]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:38:14 managed-node2 platform-python[26933]: 
ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None\nJul 19 12:38:14 managed-node2 systemd[25528]: Reloading.\nJul 19 12:38:14 managed-node2 sudo[26930]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:38:15 managed-node2 dnsmasq[26754]: listening on cni-podman1(#3): fe80::7419:76ff:fec8:78b3%cni-podman1\nJul 19 12:38:15 managed-node2 sudo[27068]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gajwmakwkbshdhvfjxukfbkcepplsltd ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943095.0683737-15953-10632554991967/AnsiballZ_systemd.py'\nJul 19 12:38:15 managed-node2 sudo[27068]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:38:15 managed-node2 platform-python[27071]: ansible-systemd Invoked with name= scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None\nJul 19 12:38:15 managed-node2 systemd[25528]: Reloading.\nJul 19 12:38:15 managed-node2 sudo[27068]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:38:15 managed-node2 sudo[27207]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dcmefbhxtsrhgtubtgzkfqommjfyuibn ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943095.7285938-15986-176076416334674/AnsiballZ_systemd.py'\nJul 19 12:38:15 managed-node2 sudo[27207]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:38:16 managed-node2 platform-python[27210]: ansible-systemd Invoked with name= scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None\nJul 19 12:38:16 managed-node2 systemd[25528]: Created slice podman\\x2dkube.slice.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:16 managed-node2 systemd[25528]: Starting A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit UNIT has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has begun starting up.\nJul 19 12:38:16 managed-node2 conmon[26789]: conmon a8773b3857e3e0dd4e13 : container 26799 exited with status 137\nJul 19 12:38:16 managed-node2 conmon[26768]: conmon f153d4517c8778d9470c : container 26778 exited with status 137\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458)\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 19 12:38:16 
managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using transient store: false\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Initializing event backend file\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: 
time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c)\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using transient store: false\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: 
time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Initializing event backend file\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup a8773b3857e3e0dd4e13fe7873092f6b99c52df33df1b8b63648bbdb52f52458)\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27235]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Shutting down engines\"\nJul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered disabled state\nJul 19 
12:38:16 managed-node2 kernel: device vetha780888b left promiscuous mode\nJul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(vetha780888b) entered disabled state\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup f153d4517c8778d9470c532bdfc50890f4d83c1a2c2d0fa45336925349495f5c)\"\nJul 19 12:38:16 managed-node2 /usr/bin/podman[27242]: time=\"2025-07-19T12:38:16-04:00\" level=debug msg=\"Shutting down engines\"\nJul 19 12:38:16 managed-node2 podman[27216]: Pods stopped:\nJul 19 12:38:16 managed-node2 podman[27216]: 0c3499cd78df9c3cbd4aa0aa8e91445278eb6157f70949cc299bc9c609a42f3c\nJul 19 12:38:16 managed-node2 podman[27216]: Pods removed:\nJul 19 12:38:16 managed-node2 podman[27216]: 0c3499cd78df9c3cbd4aa0aa8e91445278eb6157f70949cc299bc9c609a42f3c\nJul 19 12:38:16 managed-node2 podman[27216]: Secrets removed:\nJul 19 12:38:16 managed-node2 podman[27216]: Volumes removed:\nJul 19 12:38:16 managed-node2 systemd[25528]: Started rootless-netns-1ff27aec.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:16 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth24653eaf: link is not ready\nJul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered blocking state\nJul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered disabled state\nJul 19 12:38:16 managed-node2 kernel: device veth24653eaf entered promiscuous mode\nJul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered blocking state\nJul 19 12:38:16 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered forwarding state\nJul 19 12:38:16 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth24653eaf: link becomes ready\nJul 19 12:38:16 managed-node2 dnsmasq[27465]: listening on cni-podman1(#3): 10.89.0.1\nJul 19 12:38:16 managed-node2 dnsmasq[27467]: started, version 2.79 cachesize 150\nJul 19 12:38:16 managed-node2 dnsmasq[27467]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify\nJul 19 12:38:16 managed-node2 dnsmasq[27467]: using local addresses only for domain dns.podman\nJul 19 12:38:16 managed-node2 dnsmasq[27467]: reading /etc/resolv.conf\nJul 19 12:38:16 managed-node2 dnsmasq[27467]: using local addresses only for domain dns.podman\nJul 19 12:38:16 managed-node2 dnsmasq[27467]: using nameserver 10.0.2.3#53\nJul 19 12:38:16 managed-node2 dnsmasq[27467]: using nameserver 10.29.169.13#53\nJul 19 12:38:16 managed-node2 dnsmasq[27467]: using nameserver 10.29.170.12#53\nJul 19 12:38:16 managed-node2 dnsmasq[27467]: using nameserver 10.2.32.1#53\nJul 19 12:38:16 managed-node2 dnsmasq[27467]: read /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 19 12:38:17 managed-node2 podman[27216]: Pod:\nJul 19 12:38:17 managed-node2 podman[27216]: 
2d1d21944d4ca561d0a237f029182068a925bb49513d0d920c7ad527b530a87a\nJul 19 12:38:17 managed-node2 podman[27216]: Container:\nJul 19 12:38:17 managed-node2 podman[27216]: fbdb7144dbaf3a0b80484872c9bcae1ed8f6a793661386bc91aa084464c69027\nJul 19 12:38:17 managed-node2 systemd[25528]: Started A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:17 managed-node2 sudo[27207]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:38:17 managed-node2 platform-python[27643]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 19 12:38:18 managed-node2 platform-python[27767]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:38:18 managed-node2 dnsmasq[27467]: listening on cni-podman1(#3): fe80::f826:e2ff:fec6:eea3%cni-podman1\nJul 19 12:38:19 managed-node2 platform-python[27892]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:20 managed-node2 platform-python[28016]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:21 managed-node2 platform-python[28139]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:38:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:38:22 managed-node2 platform-python[28430]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:38:23 managed-node2 platform-python[28553]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True 
modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:23 managed-node2 platform-python[28676]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 19 12:38:23 managed-node2 platform-python[28775]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752943103.1443834-16356-64021954424990/source _original_basename=tmp0hh2oj3u follow=False checksum=b06d991e561d2233cf906d852db9b578dc61ce26 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None\nJul 19 12:38:24 managed-node2 platform-python[28900]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:38:24 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice.\n-- Subject: Unit machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.3685] manager: (cni-podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3)\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.3718] manager: (vethf4165b4a): new Veth device (/org/freedesktop/NetworkManager/Devices/4)\nJul 19 12:38:24 managed-node2 systemd-udevd[28949]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.\nJul 19 12:38:24 managed-node2 systemd-udevd[28949]: Could not generate persistent MAC address for vethf4165b4a: No such file or directory\nJul 19 12:38:24 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vethf4165b4a: link is not ready\nJul 19 12:38:24 managed-node2 systemd-udevd[28948]: Using default interface naming scheme 'rhel-8.0'.\nJul 19 12:38:24 managed-node2 systemd-udevd[28948]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.\nJul 19 12:38:24 managed-node2 systemd-udevd[28948]: Could not generate persistent MAC address for cni-podman1: No such file or directory\nJul 19 12:38:24 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered blocking state\nJul 19 12:38:24 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered disabled state\nJul 19 12:38:24 
managed-node2 kernel: device vethf4165b4a entered promiscuous mode\nJul 19 12:38:24 managed-node2 dbus-daemon[591]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.5' (uid=0 pid=661 comm=\"/usr/sbin/NetworkManager --no-daemon \" label=\"system_u:system_r:NetworkManager_t:s0\")\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4140] device (cni-podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', sys-iface-state: 'external')\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4145] device (cni-podman1): state change: unavailable -> disconnected (reason 'connection-assumed', sys-iface-state: 'external')\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4153] device (cni-podman1): Activation: starting connection 'cni-podman1' (288926ec-c137-47aa-80eb-b1812c1bfed2)\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4154] device (cni-podman1): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'external')\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4157] device (cni-podman1): state change: prepare -> config (reason 'none', sys-iface-state: 'external')\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4159] device (cni-podman1): state change: config -> ip-config (reason 'none', sys-iface-state: 'external')\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4160] device (cni-podman1): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'external')\nJul 19 12:38:24 managed-node2 systemd[1]: Starting Network Manager Script Dispatcher Service...\n-- Subject: Unit NetworkManager-dispatcher.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit NetworkManager-dispatcher.service has begun starting up.\nJul 19 12:38:24 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready\nJul 19 12:38:24 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready\nJul 19 12:38:24 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethf4165b4a: link becomes ready\nJul 19 12:38:24 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered blocking state\nJul 19 12:38:24 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered forwarding state\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4643] device (vethf4165b4a): carrier: link connected\nJul 19 12:38:24 managed-node2 dbus-daemon[591]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher'\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4647] device (cni-podman1): carrier: link connected\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4663] device (cni-podman1): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'external')\nJul 19 12:38:24 managed-node2 systemd[1]: Started Network Manager Script Dispatcher Service.\n-- Subject: Unit NetworkManager-dispatcher.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit NetworkManager-dispatcher.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4665] device (cni-podman1): state change: secondaries -> activated (reason 'none', sys-iface-state: 'external')\nJul 19 12:38:24 managed-node2 NetworkManager[661]: [1752943104.4669] device 
(cni-podman1): Activation: successful, device activated.\nJul 19 12:38:24 managed-node2 dnsmasq[29070]: listening on cni-podman1(#3): 10.89.0.1\nJul 19 12:38:24 managed-node2 dnsmasq[29074]: started, version 2.79 cachesize 150\nJul 19 12:38:24 managed-node2 dnsmasq[29074]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify\nJul 19 12:38:24 managed-node2 dnsmasq[29074]: using local addresses only for domain dns.podman\nJul 19 12:38:24 managed-node2 dnsmasq[29074]: reading /etc/resolv.conf\nJul 19 12:38:24 managed-node2 dnsmasq[29074]: using local addresses only for domain dns.podman\nJul 19 12:38:24 managed-node2 dnsmasq[29074]: using nameserver 10.29.169.13#53\nJul 19 12:38:24 managed-node2 dnsmasq[29074]: using nameserver 10.29.170.12#53\nJul 19 12:38:24 managed-node2 dnsmasq[29074]: using nameserver 10.2.32.1#53\nJul 19 12:38:24 managed-node2 dnsmasq[29074]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 19 12:38:24 managed-node2 systemd[1]: Started libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope.\n-- Subject: Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:24 managed-node2 conmon[29076]: conmon c67c85c8cb2aae9cc679 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach}\nJul 19 12:38:24 managed-node2 conmon[29076]: conmon c67c85c8cb2aae9cc679 : terminal_ctrl_fd: 13\nJul 19 12:38:24 managed-node2 conmon[29076]: conmon c67c85c8cb2aae9cc679 : winsz read side: 17, winsz write side: 18\nJul 19 12:38:24 managed-node2 systemd[1]: Started libcontainer container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.\n-- Subject: Unit libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:24 managed-node2 conmon[29076]: conmon c67c85c8cb2aae9cc679 : container PID: 29087\nJul 19 12:38:24 managed-node2 systemd[1]: Started libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope.\n-- Subject: Unit libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:24 managed-node2 conmon[29098]: conmon add9038e274085c173b1 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach}\nJul 19 12:38:24 managed-node2 conmon[29098]: conmon add9038e274085c173b1 : terminal_ctrl_fd: 12\nJul 19 12:38:24 managed-node2 conmon[29098]: conmon add9038e274085c173b1 : winsz read side: 16, winsz write side: 17\nJul 19 12:38:24 managed-node2 systemd[1]: Started libcontainer container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.\n-- Subject: Unit libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has 
finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:24 managed-node2 conmon[29098]: conmon add9038e274085c173b1 : container PID: 29108\nJul 19 12:38:24 managed-node2 platform-python[28900]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 19 12:38:24 managed-node2 platform-python[28900]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod:\n f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816\n Container:\n add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\n \nJul 19 12:38:24 managed-node2 platform-python[28900]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-19T12:38:24-04:00\" level=info msg=\"/usr/bin/podman filtering at log level debug\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\n time=\"2025-07-19T12:38:24-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Initializing event backend file\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid 
executable found for OCI runtime runj: invalid argument\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-19 12:35:55.640649556 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a)\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice for parent machine.slice and name libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug 
msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a)\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Inspecting image fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Inspecting image fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Inspecting image fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"setting container name f8000a88fe4a-infra\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Allocated lock 1 for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Cached value indicated that idmapped mounts for overlay are not supported\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Check for idmapped mounts support \"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created container \\\"c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Container \\\"c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\\\" has work directory \\\"/var/lib/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Container 
\\\"c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\\\" has run directory \\\"/run/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Pulling image quay.io/libpod/testimage:20210610 (policy: missing)\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"parsed reference into 
\\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"adding 
container to pod httpd2\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"setting container name httpd2-httpd2\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=info msg=\"Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Adding mount /proc\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Adding mount /dev\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Adding mount /dev/pts\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Adding mount /dev/mqueue\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Adding mount /sys\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Adding mount /sys/fs/cgroup\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Allocated lock 2 for container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created container \\\"add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Container \\\"add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\\\" has work directory \\\"/var/lib/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Container \\\"add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\\\" has run directory \\\"/run/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Strongconnecting node c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Pushed c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c onto stack\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Finishing node c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c. Popped c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c off stack\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Strongconnecting node add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Pushed add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f onto stack\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Finishing node add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f. 
Popped add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f off stack\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/HXWSEHVDVE6HABOKZ6B2SSNLKD,upperdir=/var/lib/containers/storage/overlay/6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e/diff,workdir=/var/lib/containers/storage/overlay/6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e/work,nodev,metacopy=on,context=\\\"system_u:object_r:container_file_t:s0:c723,c1018\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Mounted container \\\"c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\\\" at \\\"/var/lib/containers/storage/overlay/6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e/merged\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created root filesystem for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c at /var/lib/containers/storage/overlay/6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e/merged\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Made network namespace at /run/netns/netns-f67fee73-2bbe-5ce9-31b0-8129b0eb7f47 for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"cni result for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:ee:2d:d5:97:9a:6b Sandbox:} {Name:vethf4165b4a Mac:d2:23:54:53:0f:5f Sandbox:} {Name:eth0 Mac:ea:eb:9c:fe:80:d8 Sandbox:/run/netns/netns-f67fee73-2bbe-5ce9-31b0-8129b0eb7f47}] [{Version:4 Interface:0xc0005a9428 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"/etc/system-fips does not exist on host, not mounting FIPS mode subscription\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Setting Cgroups for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c to machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice:libpod:c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Workdir \\\"/\\\" resolved to host path \\\"/var/lib/containers/storage/overlay/6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e/merged\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created OCI spec for container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c at /var/lib/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata/config.json\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice for parent machine.slice and name libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Got pod cgroup as 
machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c -u c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata -p /run/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata/pidfile -n f8000a88fe4a-infra --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c]\"\n time=\"2025-07-19T12:38:24-04:00\" level=info msg=\"Running conmon under slice machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice and unitName libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Received: 29087\"\n time=\"2025-07-19T12:38:24-04:00\" level=info msg=\"Got Conmon PID as 29076\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c in OCI runtime\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Adding nameserver(s) from network status of '[\\\"10.89.0.1\\\"]'\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Adding search domain(s) from network status of '[\\\"dns.podman\\\"]'\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Starting container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c with command [/catatonit -P]\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Started container c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"overlay: 
mount_data=lowerdir=/var/lib/containers/storage/overlay/l/HJUTFIUMULI3FBOA3A6VGXTPPL,upperdir=/var/lib/containers/storage/overlay/ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45/diff,workdir=/var/lib/containers/storage/overlay/ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45/work,nodev,metacopy=on,context=\\\"system_u:object_r:container_file_t:s0:c723,c1018\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Mounted container \\\"add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\\\" at \\\"/var/lib/containers/storage/overlay/ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45/merged\\\"\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created root filesystem for container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f at /var/lib/containers/storage/overlay/ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45/merged\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"/etc/system-fips does not exist on host, not mounting FIPS mode subscription\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Setting Cgroups for container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f to machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice:libpod:add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Workdir \\\"/var/www\\\" resolved to a volume or mount\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created OCI spec for container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f at /var/lib/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata/config.json\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice for parent machine.slice and name libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f -u add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata -p /run/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f/userdata/conmon.pid 
--exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f]\"\n time=\"2025-07-19T12:38:24-04:00\" level=info msg=\"Running conmon under slice machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice and unitName libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Received: 29108\"\n time=\"2025-07-19T12:38:24-04:00\" level=info msg=\"Got Conmon PID as 29098\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Created container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f in OCI runtime\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Starting container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f with command [/bin/busybox-extras httpd -f -p 80]\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Started container add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)\"\n time=\"2025-07-19T12:38:24-04:00\" level=debug msg=\"Shutting down engines\"\nJul 19 12:38:24 managed-node2 platform-python[28900]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 19 12:38:25 managed-node2 platform-python[29239]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None\nJul 19 12:38:25 managed-node2 systemd[1]: Reloading.\nJul 19 12:38:25 managed-node2 dnsmasq[29074]: listening on cni-podman1(#3): fe80::ec2d:d5ff:fe97:9a6b%cni-podman1\nJul 19 12:38:26 managed-node2 platform-python[29400]: ansible-systemd Invoked with name= scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None\nJul 19 12:38:26 managed-node2 systemd[1]: Reloading.\nJul 19 12:38:26 managed-node2 platform-python[29563]: ansible-systemd Invoked with name= scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None\nJul 19 12:38:26 managed-node2 systemd[1]: Created slice system-podman\\x2dkube.slice.\n-- Subject: Unit system-podman\\x2dkube.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit system-podman\\x2dkube.slice has finished 
starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:26 managed-node2 systemd[1]: Starting A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun starting up.\nJul 19 12:38:26 managed-node2 conmon[29076]: conmon c67c85c8cb2aae9cc679 : container 29087 exited with status 137\nJul 19 12:38:26 managed-node2 systemd[1]: libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has successfully entered the 'dead' state.\nJul 19 12:38:26 managed-node2 systemd[1]: libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope: Consumed 32ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope completed and consumed the indicated resources.\nJul 19 12:38:26 managed-node2 conmon[29098]: conmon add9038e274085c173b1 : container 29108 exited with status 137\nJul 19 12:38:26 managed-node2 systemd[1]: libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has successfully entered the 'dead' state.\nJul 19 12:38:26 managed-node2 systemd[1]: libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope: Consumed 33ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope completed and consumed the indicated resources.\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c)\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage 
--log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f)\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using transient store: false\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Initializing event backend file\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 19 12:38:26 
managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using transient store: false\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: 
time=\"2025-07-19T12:38:26-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Initializing event backend file\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:26-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 19 12:38:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay-ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-ea49505f4f2f1b3e93444d6d56c71c0ed1bb99d0acd9df081de88db0be157d45-merged.mount has successfully entered the 'dead' state.\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f)\"\nJul 19 12:38:26 managed-node2 /usr/bin/podman[29597]: 
time=\"2025-07-19T12:38:26-04:00\" level=debug msg=\"Shutting down engines\"\nJul 19 12:38:26 managed-node2 systemd[1]: libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-conmon-add9038e274085c173b1cf5767058b3f14204d53f9f40ddf78e148eb2cced49f.scope has successfully entered the 'dead' state.\nJul 19 12:38:26 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered disabled state\nJul 19 12:38:26 managed-node2 kernel: device vethf4165b4a left promiscuous mode\nJul 19 12:38:26 managed-node2 kernel: cni-podman1: port 1(vethf4165b4a) entered disabled state\nJul 19 12:38:26 managed-node2 systemd[1]: run-netns-netns\\x2df67fee73\\x2d2bbe\\x2d5ce9\\x2d31b0\\x2d8129b0eb7f47.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-netns-netns\\x2df67fee73\\x2d2bbe\\x2d5ce9\\x2d31b0\\x2d8129b0eb7f47.mount has successfully entered the 'dead' state.\nJul 19 12:38:27 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c-userdata-shm.mount has successfully entered the 'dead' state.\nJul 19 12:38:27 managed-node2 systemd[1]: var-lib-containers-storage-overlay-6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-6109d44d6bd9ce0a3baf128dfbfdaefa4e131e16f4ee7d8f1806aa4e28706a2e-merged.mount has successfully entered the 'dead' state.\nJul 19 12:38:27 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:27-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c)\"\nJul 19 12:38:27 managed-node2 /usr/bin/podman[29585]: time=\"2025-07-19T12:38:27-04:00\" level=debug msg=\"Shutting down engines\"\nJul 19 12:38:27 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:38:27 managed-node2 systemd[1]: Stopping libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope.\n-- Subject: Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has begun shutting 
down.\nJul 19 12:38:27 managed-node2 systemd[1]: libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has successfully entered the 'dead' state.\nJul 19 12:38:27 managed-node2 systemd[1]: Stopped libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope.\n-- Subject: Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-c67c85c8cb2aae9cc679e5a821b6e7588e24b5f253097d63f2ea835b3762b01c.scope has finished shutting down.\nJul 19 12:38:27 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice.\n-- Subject: Unit machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice has finished shutting down.\nJul 19 12:38:27 managed-node2 systemd[1]: machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice: Consumed 198ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816.slice completed and consumed the indicated resources.\nJul 19 12:38:27 managed-node2 podman[29570]: Pods stopped:\nJul 19 12:38:27 managed-node2 podman[29570]: f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816\nJul 19 12:38:27 managed-node2 podman[29570]: Pods removed:\nJul 19 12:38:27 managed-node2 podman[29570]: f8000a88fe4a66c90624f19a2680e9e647b805887ba0258d4f6ab33570112816\nJul 19 12:38:27 managed-node2 podman[29570]: Secrets removed:\nJul 19 12:38:27 managed-node2 podman[29570]: Volumes removed:\nJul 19 12:38:27 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice.\n-- Subject: Unit machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:27 managed-node2 systemd[1]: Started libcontainer container 4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.\n-- Subject: Unit libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vetha38befe0: link is not ready\nJul 19 12:38:27 managed-node2 systemd-udevd[29728]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.\nJul 19 12:38:27 managed-node2 systemd-udevd[29728]: 
Could not generate persistent MAC address for vetha38befe0: No such file or directory\nJul 19 12:38:27 managed-node2 NetworkManager[661]: [1752943107.3392] manager: (vetha38befe0): new Veth device (/org/freedesktop/NetworkManager/Devices/5)\nJul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered blocking state\nJul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered disabled state\nJul 19 12:38:27 managed-node2 kernel: device vetha38befe0 entered promiscuous mode\nJul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered blocking state\nJul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered forwarding state\nJul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered disabled state\nJul 19 12:38:27 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vetha38befe0: link becomes ready\nJul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered blocking state\nJul 19 12:38:27 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered forwarding state\nJul 19 12:38:27 managed-node2 NetworkManager[661]: [1752943107.3666] device (vetha38befe0): carrier: link connected\nJul 19 12:38:27 managed-node2 NetworkManager[661]: [1752943107.3670] device (cni-podman1): carrier: link connected\nJul 19 12:38:27 managed-node2 dnsmasq[29798]: listening on cni-podman1(#3): 10.89.0.1\nJul 19 12:38:27 managed-node2 dnsmasq[29798]: listening on cni-podman1(#3): fe80::ec2d:d5ff:fe97:9a6b%cni-podman1\nJul 19 12:38:27 managed-node2 dnsmasq[29802]: started, version 2.79 cachesize 150\nJul 19 12:38:27 managed-node2 dnsmasq[29802]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify\nJul 19 12:38:27 managed-node2 dnsmasq[29802]: using local addresses only for domain dns.podman\nJul 19 12:38:27 managed-node2 dnsmasq[29802]: reading /etc/resolv.conf\nJul 19 12:38:27 managed-node2 dnsmasq[29802]: using local addresses only for domain dns.podman\nJul 19 12:38:27 managed-node2 dnsmasq[29802]: using nameserver 10.29.169.13#53\nJul 19 12:38:27 managed-node2 dnsmasq[29802]: using nameserver 10.29.170.12#53\nJul 19 12:38:27 managed-node2 dnsmasq[29802]: using nameserver 10.2.32.1#53\nJul 19 12:38:27 managed-node2 dnsmasq[29802]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 19 12:38:27 managed-node2 systemd[1]: Started libcontainer container 64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.\n-- Subject: Unit libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:27 managed-node2 systemd[1]: Started libcontainer container d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.\n-- Subject: Unit libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:27 managed-node2 podman[29570]: Pod:\nJul 19 12:38:27 managed-node2 podman[29570]: 
63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7\nJul 19 12:38:27 managed-node2 podman[29570]: Container:\nJul 19 12:38:27 managed-node2 podman[29570]: d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425\nJul 19 12:38:27 managed-node2 systemd[1]: Started A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:28 managed-node2 platform-python[29967]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:38:29 managed-node2 platform-python[30100]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:30 managed-node2 platform-python[30224]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:31 managed-node2 platform-python[30347]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:32 managed-node2 platform-python[30636]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:38:32 managed-node2 platform-python[30759]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:33 managed-node2 platform-python[30882]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 19 12:38:33 managed-node2 platform-python[30981]: ansible-copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1752943113.1342852-16787-124035634200669/source _original_basename=tmpprx4cnlk 
follow=False checksum=6f620a32a353317135005413ecc9cbab44a8759d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None\nJul 19 12:38:34 managed-node2 platform-python[31106]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:38:34 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice.\n-- Subject: Unit machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:34 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vethca04a2e2: link is not ready\nJul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered blocking state\nJul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered disabled state\nJul 19 12:38:34 managed-node2 kernel: device vethca04a2e2 entered promiscuous mode\nJul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered blocking state\nJul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered forwarding state\nJul 19 12:38:34 managed-node2 NetworkManager[661]: [1752943114.3363] manager: (vethca04a2e2): new Veth device (/org/freedesktop/NetworkManager/Devices/6)\nJul 19 12:38:34 managed-node2 systemd-udevd[31156]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.\nJul 19 12:38:34 managed-node2 systemd-udevd[31156]: Could not generate persistent MAC address for vethca04a2e2: No such file or directory\nJul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered disabled state\nJul 19 12:38:34 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready\nJul 19 12:38:34 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready\nJul 19 12:38:34 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethca04a2e2: link becomes ready\nJul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered blocking state\nJul 19 12:38:34 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered forwarding state\nJul 19 12:38:34 managed-node2 NetworkManager[661]: [1752943114.3654] device (vethca04a2e2): carrier: link connected\nJul 19 12:38:34 managed-node2 dnsmasq[29802]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 2 addresses\nJul 19 12:38:34 managed-node2 systemd[1]: Started libpod-conmon-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope.\n-- Subject: Unit libpod-conmon-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has finished start-up\n-- Defined-By: systemd\n-- Support: 
https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:34 managed-node2 systemd[1]: Started libcontainer container 9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.\n-- Subject: Unit libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:34 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 19 12:38:34 managed-node2 systemd[1]: Started libpod-conmon-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope.\n-- Subject: Unit libpod-conmon-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-conmon-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:34 managed-node2 systemd[1]: Started libcontainer container 798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.\n-- Subject: Unit libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:35 managed-node2 platform-python[31387]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None\nJul 19 12:38:35 managed-node2 systemd[1]: Reloading.\nJul 19 12:38:35 managed-node2 platform-python[31548]: ansible-systemd Invoked with name= scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None user=None\nJul 19 12:38:35 managed-node2 systemd[1]: Reloading.\nJul 19 12:38:36 managed-node2 platform-python[31703]: ansible-systemd Invoked with name= scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None\nJul 19 12:38:36 managed-node2 systemd[1]: Starting A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun starting up.\nJul 19 12:38:36 managed-node2 systemd[1]: libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has successfully entered the 'dead' state.\nJul 19 12:38:36 managed-node2 systemd[1]: 
libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope: Consumed 34ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope completed and consumed the indicated resources.\nJul 19 12:38:36 managed-node2 systemd[1]: libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has successfully entered the 'dead' state.\nJul 19 12:38:36 managed-node2 systemd[1]: libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope: Consumed 36ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope completed and consumed the indicated resources.\nJul 19 12:38:36 managed-node2 dnsmasq[29802]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 19 12:38:36 managed-node2 systemd[1]: var-lib-containers-storage-overlay-0e27a2de6423f234f5c5cc21592f99c374ae3f65ee2ffe512e2ea9260072c30b-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-0e27a2de6423f234f5c5cc21592f99c374ae3f65ee2ffe512e2ea9260072c30b-merged.mount has successfully entered the 'dead' state.\nJul 19 12:38:36 managed-node2 systemd[1]: libpod-conmon-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-conmon-798f49f831d4bdb878cb43e1d0255660d7949c9d620e71377172122b09936943.scope has successfully entered the 'dead' state.\nJul 19 12:38:36 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered disabled state\nJul 19 12:38:36 managed-node2 kernel: device vethca04a2e2 left promiscuous mode\nJul 19 12:38:36 managed-node2 kernel: cni-podman1: port 2(vethca04a2e2) entered disabled state\nJul 19 12:38:36 managed-node2 systemd[1]: run-netns-netns\\x2dbc35cf78\\x2d8b29\\x2d812d\\x2d8688\\x2d6b3a472533c6.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-netns-netns\\x2dbc35cf78\\x2d8b29\\x2d812d\\x2d8688\\x2d6b3a472533c6.mount has successfully entered the 'dead' state.\nJul 19 12:38:36 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91-userdata-shm.mount has successfully entered the 'dead' state.\nJul 19 12:38:36 managed-node2 systemd[1]: var-lib-containers-storage-overlay-50218d54ec26584adc0c1ba212de5d0b7c4329564918e9762d222c23ddef0ca1-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit 
var-lib-containers-storage-overlay-50218d54ec26584adc0c1ba212de5d0b7c4329564918e9762d222c23ddef0ca1-merged.mount has successfully entered the 'dead' state.\nJul 19 12:38:36 managed-node2 systemd[1]: libpod-conmon-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-conmon-9113150baac76971b3cad9eb4ecd13688376b8acffa3441f8a9744007a717c91.scope has successfully entered the 'dead' state.\nJul 19 12:38:36 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice.\n-- Subject: Unit machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice has finished shutting down.\nJul 19 12:38:36 managed-node2 systemd[1]: machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice: Consumed 192ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a.slice completed and consumed the indicated resources.\nJul 19 12:38:36 managed-node2 podman[31710]: Pods stopped:\nJul 19 12:38:36 managed-node2 podman[31710]: 7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a\nJul 19 12:38:36 managed-node2 podman[31710]: Pods removed:\nJul 19 12:38:36 managed-node2 podman[31710]: 7868090df2e6ebb87ea2960fd745759c44408f84809653b2783f40f36eb1c48a\nJul 19 12:38:36 managed-node2 podman[31710]: Secrets removed:\nJul 19 12:38:36 managed-node2 podman[31710]: Volumes removed:\nJul 19 12:38:37 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice.\n-- Subject: Unit machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:37 managed-node2 systemd[1]: Started libcontainer container c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.\n-- Subject: Unit libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:37 managed-node2 NetworkManager[661]: [1752943117.2271] manager: (vetha6d4d23e): new Veth device (/org/freedesktop/NetworkManager/Devices/7)\nJul 19 12:38:37 managed-node2 systemd-udevd[31876]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.\nJul 19 12:38:37 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vetha6d4d23e: link is not ready\nJul 19 12:38:37 managed-node2 systemd-udevd[31876]: Could not generate persistent MAC address for vetha6d4d23e: No such file or directory\nJul 19 12:38:37 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) 
entered blocking state\nJul 19 12:38:37 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered disabled state\nJul 19 12:38:37 managed-node2 kernel: device vetha6d4d23e entered promiscuous mode\nJul 19 12:38:37 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered blocking state\nJul 19 12:38:37 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered forwarding state\nJul 19 12:38:37 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vetha6d4d23e: link becomes ready\nJul 19 12:38:37 managed-node2 NetworkManager[661]: [1752943117.2430] device (vetha6d4d23e): carrier: link connected\nJul 19 12:38:37 managed-node2 dnsmasq[29802]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 2 addresses\nJul 19 12:38:37 managed-node2 systemd[1]: Started libcontainer container 8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.\n-- Subject: Unit libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:37 managed-node2 systemd[1]: Started libcontainer container bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.\n-- Subject: Unit libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:37 managed-node2 podman[31710]: Pod:\nJul 19 12:38:37 managed-node2 podman[31710]: 78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf\nJul 19 12:38:37 managed-node2 podman[31710]: Container:\nJul 19 12:38:37 managed-node2 podman[31710]: bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c\nJul 19 12:38:37 managed-node2 systemd[1]: Started A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:38 managed-node2 sudo[32108]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dlblsunulyjazdrjwbxpqfeiydnfqnsr ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943118.083963-17014-206060369573326/AnsiballZ_command.py'\nJul 19 12:38:38 managed-node2 sudo[32108]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:38:38 managed-node2 platform-python[32111]: ansible-command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:38 managed-node2 systemd[25528]: Started podman-32120.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:38:38 managed-node2 sudo[32108]: pam_unix(sudo:session): session closed 
for user podman_basic_user\nJul 19 12:38:38 managed-node2 platform-python[32258]: ansible-command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:39 managed-node2 platform-python[32389]: ansible-command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:39 managed-node2 sudo[32520]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-eeofqvmjjevxeutcqzjhxoeuxycorapq ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943119.4551659-17079-9632647473971/AnsiballZ_command.py'\nJul 19 12:38:39 managed-node2 sudo[32520]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:38:39 managed-node2 platform-python[32523]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:39 managed-node2 sudo[32520]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:38:40 managed-node2 platform-python[32649]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:40 managed-node2 platform-python[32775]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:41 managed-node2 platform-python[32901]: ansible-uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:41 managed-node2 platform-python[33025]: ansible-uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:38:41 managed-node2 rsyslogd[1019]: imjournal: journal files changed, reloading... 
[v8.2102.0-15.el8 try https://www.rsyslog.com/e/0 ]\nJul 19 12:38:41 managed-node2 platform-python[33150]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr_s72m6vdk_podman/httpd1-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:42 managed-node2 platform-python[33274]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr_s72m6vdk_podman/httpd2-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:42 managed-node2 platform-python[33398]: ansible-command Invoked with _raw_params=ls -alrtF /tmp/lsr_s72m6vdk_podman/httpd3-create warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:45 managed-node2 platform-python[33647]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:38:46 managed-node2 platform-python[33775]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:38:49 managed-node2 platform-python[33900]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:38:52 managed-node2 platform-python[34023]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 19 12:38:52 managed-node2 platform-python[34150]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 19 12:38:53 managed-node2 platform-python[34277]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 19 12:38:55 managed-node2 platform-python[34400]: ansible-dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:38:58 managed-node2 
platform-python[34523]: ansible-dnf Invoked with name=['grubby'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:39:01 managed-node2 platform-python[34646]: ansible-dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:39:03 managed-node2 platform-python[34769]: ansible-setup Invoked with filter=ansible_selinux gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 19 12:39:05 managed-node2 platform-python[34930]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 19 12:39:06 managed-node2 platform-python[35053]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 19 12:39:10 managed-node2 platform-python[35176]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 19 12:39:11 managed-node2 platform-python[35300]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:11 managed-node2 platform-python[35425]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:12 managed-node2 platform-python[35549]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:13 managed-node2 platform-python[35673]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:14 managed-node2 platform-python[35797]: ansible-command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None\nJul 19 12:39:14 managed-node2 platform-python[35920]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER 
backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:15 managed-node2 platform-python[36043]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:15 managed-node2 sudo[36166]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wbomjbspvcxjkqucdsequagxlcclxnab ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943155.3657928-18727-255969634455656/AnsiballZ_podman_image.py'\nJul 19 12:39:15 managed-node2 sudo[36166]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:39:15 managed-node2 systemd[25528]: Started podman-36171.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:39:15 managed-node2 systemd[25528]: Started podman-36179.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:39:15 managed-node2 systemd[25528]: Started podman-36187.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:39:15 managed-node2 systemd[25528]: Started podman-36195.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:39:15 managed-node2 systemd[25528]: Started podman-36203.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:39:16 managed-node2 systemd[25528]: Started podman-36211.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:39:16 managed-node2 sudo[36166]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:39:16 managed-node2 platform-python[36340]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:17 managed-node2 platform-python[36465]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None 
seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:17 managed-node2 platform-python[36588]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 19 12:39:17 managed-node2 platform-python[36652]: ansible-file Invoked with owner=podman_basic_user group=3001 mode=0644 dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _original_basename=tmpvcvwgcl1 recurse=False state=file path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:18 managed-node2 sudo[36775]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mwgfktigaxpvesxpytdoaduxtkxpnkwn ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943158.1234493-18828-35756619876473/AnsiballZ_podman_play.py'\nJul 19 12:39:18 managed-node2 sudo[36775]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:39:18 managed-node2 platform-python[36778]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:39:18 managed-node2 systemd[25528]: Started podman-36786.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:39:18 managed-node2 platform-python[36778]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 19 12:39:18 managed-node2 platform-python[36778]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: \nJul 19 12:39:18 managed-node2 platform-python[36778]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-19T12:39:18-04:00\" level=info msg=\"/bin/podman filtering at log level debug\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-19T12:39:18-04:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Using graph driver overlay\"\n 
time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Initializing event backend file\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\n time=\"2025-07-19T12:39:18-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-19 12:38:13.521272 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Normalized platform 
linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\"\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63)\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9ff8feff5573971b586c5f4acafd5da3d2da6cad736403341f337b0c7449bd63\\\"\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Got pod cgroup as /libpod_parent/1846ecb89dbdb057faef33ff14bed3ee782f5fffa65b2fd38248f39e0fe82c96\"\n Error: adding pod to state: name \"httpd1\" is in use: pod already exists\n time=\"2025-07-19T12:39:18-04:00\" level=debug msg=\"Shutting down engines\"\nJul 19 12:39:18 managed-node2 platform-python[36778]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125\nJul 19 12:39:18 managed-node2 sudo[36775]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:39:19 managed-node2 platform-python[36940]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 19 12:39:20 managed-node2 platform-python[37064]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:21 managed-node2 platform-python[37189]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:22 managed-node2 platform-python[37313]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:22 managed-node2 platform-python[37436]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:23 managed-node2 platform-python[37727]: ansible-stat Invoked with 
path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:24 managed-node2 platform-python[37852]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:24 managed-node2 platform-python[37975]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 19 12:39:24 managed-node2 platform-python[38039]: ansible-file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd2.yml _original_basename=tmpoct5ap5y recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd2.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:25 managed-node2 platform-python[38162]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:39:25 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice.\n-- Subject: Unit machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:39:25 managed-node2 platform-python[38162]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 19 12:39:25 managed-node2 platform-python[38162]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: \nJul 19 12:39:25 managed-node2 platform-python[38162]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-19T12:39:25-04:00\" level=info msg=\"/usr/bin/podman filtering at log level debug\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n 
time=\"2025-07-19T12:39:25-04:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\n time=\"2025-07-19T12:39:25-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Initializing event backend file\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/runc\\\"\"\n time=\"2025-07-19T12:39:25-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-07-19 12:35:55.640649556 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-19T12:39:25-04:00\" 
level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Looking up image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Trying \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" ...\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\\\"\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Found image \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" as \\\"localhost/podman-pause:4.9.4-dev-1708535009\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a)\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:fcd1d7de3030adbbc5b3325f748e0ddb4920c8864d46b3718e6cf77e25ad127a\\\"\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice for parent machine.slice and name libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice\"\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_4f690c322dcc4119edaad19b1d92199e85b66f23ea00661f7843147261bdcf19.slice\"\n Error: adding pod to state: name \"httpd2\" is in use: pod already exists\n time=\"2025-07-19T12:39:25-04:00\" level=debug msg=\"Shutting down engines\"\nJul 19 12:39:25 managed-node2 platform-python[38162]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125\nJul 19 12:39:26 managed-node2 platform-python[38323]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:27 managed-node2 platform-python[38448]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:28 managed-node2 platform-python[38572]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:29 managed-node2 platform-python[38695]: ansible-file Invoked with 
path=/tmp/lsr_s72m6vdk_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:30 managed-node2 platform-python[38984]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:31 managed-node2 platform-python[39109]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:31 managed-node2 platform-python[39232]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True\nJul 19 12:39:31 managed-node2 platform-python[39296]: ansible-file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd3.yml _original_basename=tmp8akhw61j recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd3.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:32 managed-node2 platform-python[39419]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:39:32 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_cb3b0a5bd9971330489767c2081b22911aea01140e5cd90be87bd649d4122b98.slice.\n-- Subject: Unit machine-libpod_pod_cb3b0a5bd9971330489767c2081b22911aea01140e5cd90be87bd649d4122b98.slice has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_cb3b0a5bd9971330489767c2081b22911aea01140e5cd90be87bd649d4122b98.slice has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:39:33 managed-node2 sudo[39581]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hrgfwbfwhnrhozyrljgrwuftlafbcvgj ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943172.9515133-19598-262459141060618/AnsiballZ_command.py'\nJul 19 12:39:33 managed-node2 
sudo[39581]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:39:33 managed-node2 platform-python[39584]: ansible-command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:33 managed-node2 systemd[25528]: Started podman-39593.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:39:33 managed-node2 sudo[39581]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:39:33 managed-node2 platform-python[39723]: ansible-command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:34 managed-node2 platform-python[39854]: ansible-command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:34 managed-node2 sudo[39985]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kapxhijpsmqpbluyhztflsgdqehfqzsf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943174.2658803-19637-151873258581255/AnsiballZ_command.py'\nJul 19 12:39:34 managed-node2 sudo[39985]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:39:34 managed-node2 platform-python[39988]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:34 managed-node2 sudo[39985]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:39:34 managed-node2 platform-python[40114]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:35 managed-node2 platform-python[40240]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:35 managed-node2 platform-python[40366]: ansible-uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:36 managed-node2 platform-python[40490]: ansible-uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True 
validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:36 managed-node2 platform-python[40614]: ansible-uri Invoked with url=http://localhost:15003/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:39 managed-node2 platform-python[40863]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:40 managed-node2 platform-python[40992]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:43 managed-node2 platform-python[41117]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 19 12:39:44 managed-node2 platform-python[41241]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:44 managed-node2 platform-python[41366]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:44 managed-node2 platform-python[41490]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:45 managed-node2 platform-python[41614]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:46 managed-node2 platform-python[41738]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:46 managed-node2 sudo[41863]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bsbqhhxxcupnkafozyypbjqjdnfplilf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943186.6742442-20261-89535707061551/AnsiballZ_systemd.py'\nJul 19 12:39:46 managed-node2 sudo[41863]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:39:47 managed-node2 
platform-python[41866]: ansible-systemd Invoked with name= scope=user state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 19 12:39:47 managed-node2 systemd[25528]: Reloading.\nJul 19 12:39:47 managed-node2 systemd[25528]: Stopping A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit UNIT has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has begun shutting down.\nJul 19 12:39:47 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered disabled state\nJul 19 12:39:47 managed-node2 kernel: device veth24653eaf left promiscuous mode\nJul 19 12:39:47 managed-node2 kernel: cni-podman1: port 1(veth24653eaf) entered disabled state\nJul 19 12:39:47 managed-node2 podman[41882]: Pods stopped:\nJul 19 12:39:47 managed-node2 podman[41882]: 2d1d21944d4ca561d0a237f029182068a925bb49513d0d920c7ad527b530a87a\nJul 19 12:39:47 managed-node2 podman[41882]: Pods removed:\nJul 19 12:39:47 managed-node2 podman[41882]: 2d1d21944d4ca561d0a237f029182068a925bb49513d0d920c7ad527b530a87a\nJul 19 12:39:47 managed-node2 podman[41882]: Secrets removed:\nJul 19 12:39:47 managed-node2 podman[41882]: Volumes removed:\nJul 19 12:39:47 managed-node2 systemd[25528]: Stopped A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:39:47 managed-node2 sudo[41863]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:39:48 managed-node2 platform-python[42156]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:48 managed-node2 sudo[42281]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rkhrtubvxxvbakhpbdfuonkadzawohqm ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943188.1900835-20342-278852314562176/AnsiballZ_podman_play.py'\nJul 19 12:39:48 managed-node2 sudo[42281]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 19 12:39:48 managed-node2 systemd[25528]: Started podman-42292.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play 
PODMAN-PLAY-KUBE command: /bin/podman kube play --down /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped:\n Pods removed:\n Secrets removed:\n Volumes removed:\nJul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: \nJul 19 12:39:48 managed-node2 platform-python[42284]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 19 12:39:48 managed-node2 sudo[42281]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:39:48 managed-node2 platform-python[42421]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:50 managed-node2 platform-python[42544]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 19 12:39:50 managed-node2 platform-python[42668]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:51 managed-node2 platform-python[42793]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:52 managed-node2 platform-python[42917]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 19 12:39:52 managed-node2 systemd[1]: Reloading.\nJul 19 12:39:52 managed-node2 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun shutting down.\nJul 19 12:39:52 managed-node2 systemd[1]: libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope has successfully entered the 'dead' state.\nJul 19 12:39:52 managed-node2 systemd[1]: libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope: Consumed 33ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3.scope completed and consumed the indicated resources.\nJul 19 12:39:52 managed-node2 systemd[1]: libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- 
\n-- The unit libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope has successfully entered the 'dead' state.\nJul 19 12:39:52 managed-node2 systemd[1]: libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope: Consumed 33ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-d26ff82467ef7baced96d001bdafe095b5eef6116f1879858d6f9b38feeef425.scope completed and consumed the indicated resources.\nJul 19 12:39:52 managed-node2 dnsmasq[29802]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses\nJul 19 12:39:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay-1ac1165dcb590ce00bffba4600c63f5cfb3b70afb8f380b4edeace6635fcdfe3-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-1ac1165dcb590ce00bffba4600c63f5cfb3b70afb8f380b4edeace6635fcdfe3-merged.mount has successfully entered the 'dead' state.\nJul 19 12:39:52 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered disabled state\nJul 19 12:39:52 managed-node2 kernel: device vetha38befe0 left promiscuous mode\nJul 19 12:39:52 managed-node2 kernel: cni-podman1: port 1(vetha38befe0) entered disabled state\nJul 19 12:39:52 managed-node2 systemd[1]: run-netns-netns\\x2d85ef15c4\\x2d2df7\\x2d918e\\x2d907f\\x2dc88b265faa98.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-netns-netns\\x2d85ef15c4\\x2d2df7\\x2d918e\\x2d907f\\x2dc88b265faa98.mount has successfully entered the 'dead' state.\nJul 19 12:39:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-64ca61e96d546dc6b70677ee904ea41b8549d40002627436fb4da91a1d5070c3-userdata-shm.mount has successfully entered the 'dead' state.\nJul 19 12:39:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay-38daa2f903ec0433792b188cb05d307d74de74874667479598255b129c8e533b-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-38daa2f903ec0433792b188cb05d307d74de74874667479598255b129c8e533b-merged.mount has successfully entered the 'dead' state.\nJul 19 12:39:52 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice.\n-- Subject: Unit machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice has finished shutting down.\nJul 19 12:39:52 managed-node2 systemd[1]: machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice: Consumed 66ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit 
machine-libpod_pod_63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7.slice completed and consumed the indicated resources.\nJul 19 12:39:52 managed-node2 podman[42953]: Pods stopped:\nJul 19 12:39:52 managed-node2 podman[42953]: 63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7\nJul 19 12:39:52 managed-node2 podman[42953]: Pods removed:\nJul 19 12:39:52 managed-node2 podman[42953]: 63cd36510fe76fd1dc36c0123954743adbae5839a5e499873ecd14271e62adc7\nJul 19 12:39:52 managed-node2 podman[42953]: Secrets removed:\nJul 19 12:39:52 managed-node2 podman[42953]: Volumes removed:\nJul 19 12:39:52 managed-node2 systemd[1]: libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope has successfully entered the 'dead' state.\nJul 19 12:39:52 managed-node2 systemd[1]: libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope: Consumed 34ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a.scope completed and consumed the indicated resources.\nJul 19 12:39:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-4c0eceab94b00c7138ed24c1326429dab3a40ba5fa3084ac8447898417f8392a-userdata-shm.mount has successfully entered the 'dead' state.\nJul 19 12:39:53 managed-node2 dnsmasq[29802]: exiting on receipt of SIGTERM\nJul 19 12:39:53 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has successfully entered the 'dead' state.\nJul 19 12:39:53 managed-node2 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished shutting down.\nJul 19 12:39:53 managed-node2 platform-python[43230]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay-946e50296936b22c6a0cd6493841882848a8040824e6c32355272e3fbcd82469-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-946e50296936b22c6a0cd6493841882848a8040824e6c32355272e3fbcd82469-merged.mount has successfully entered the 'dead' state.\nJul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug 
kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman kube play --down /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped:\n Pods removed:\n Secrets removed:\n Volumes removed:\nJul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: \nJul 19 12:39:53 managed-node2 platform-python[43355]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 19 12:39:54 managed-node2 platform-python[43492]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:39:55 managed-node2 platform-python[43615]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:56 managed-node2 platform-python[43740]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:39:57 managed-node2 platform-python[43864]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 19 12:39:57 managed-node2 systemd[1]: Reloading.\nJul 19 12:39:57 managed-node2 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun shutting down.\nJul 19 12:39:57 managed-node2 systemd[1]: libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope has successfully entered the 'dead' state.\nJul 19 12:39:57 managed-node2 systemd[1]: libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope: Consumed 33ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: 
https://access.redhat.com/support\n-- \n-- The unit libpod-bd0d4c33c2ae2f274afb0d5b291d6b1eb807d7f4d8b75d1dcfd10bd56c264e4c.scope completed and consumed the indicated resources.\nJul 19 12:39:57 managed-node2 systemd[1]: libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope has successfully entered the 'dead' state.\nJul 19 12:39:57 managed-node2 systemd[1]: libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope: Consumed 33ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6.scope completed and consumed the indicated resources.\nJul 19 12:39:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay-e4d65582a94e2bdc8a1eaad3e5573271c39b373e604383029d5c678d2ac244a1-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-e4d65582a94e2bdc8a1eaad3e5573271c39b373e604383029d5c678d2ac244a1-merged.mount has successfully entered the 'dead' state.\nJul 19 12:39:57 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered disabled state\nJul 19 12:39:57 managed-node2 kernel: device vetha6d4d23e left promiscuous mode\nJul 19 12:39:57 managed-node2 kernel: cni-podman1: port 2(vetha6d4d23e) entered disabled state\nJul 19 12:39:57 managed-node2 systemd[1]: run-netns-netns\\x2d4c7240ed\\x2da995\\x2deb80\\x2d20f7\\x2d420676ba3f43.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-netns-netns\\x2d4c7240ed\\x2da995\\x2deb80\\x2d20f7\\x2d420676ba3f43.mount has successfully entered the 'dead' state.\nJul 19 12:39:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-8a2fdf1656cbe2234a24e76f3221bd30d0cd31fc05007211cbb6e874ce9296b6-userdata-shm.mount has successfully entered the 'dead' state.\nJul 19 12:39:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay-9dcce2629a28829a7d47e45b786a91b8326ac6500c27209769c1539d9f082e74-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-9dcce2629a28829a7d47e45b786a91b8326ac6500c27209769c1539d9f082e74-merged.mount has successfully entered the 'dead' state.\nJul 19 12:39:57 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice.\n-- Subject: Unit machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice has finished shutting down.\nJul 19 12:39:57 managed-node2 systemd[1]: 
machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice: Consumed 67ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf.slice completed and consumed the indicated resources.\nJul 19 12:39:58 managed-node2 podman[43900]: Pods stopped:\nJul 19 12:39:58 managed-node2 podman[43900]: 78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf\nJul 19 12:39:58 managed-node2 podman[43900]: Pods removed:\nJul 19 12:39:58 managed-node2 podman[43900]: 78a84c01c27f5f8e508cafa0a254c389d553197c41035724e2f523a7aef687cf\nJul 19 12:39:58 managed-node2 podman[43900]: Secrets removed:\nJul 19 12:39:58 managed-node2 podman[43900]: Volumes removed:\nJul 19 12:39:58 managed-node2 systemd[1]: libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope has successfully entered the 'dead' state.\nJul 19 12:39:58 managed-node2 systemd[1]: libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope: Consumed 33ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c.scope completed and consumed the indicated resources.\nJul 19 12:39:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-c3236233372a18965a640ca894c092d100c3196512502cbf81e713d53a8d732c-userdata-shm.mount has successfully entered the 'dead' state.\nJul 19 12:39:58 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has successfully entered the 'dead' state.\nJul 19 12:39:58 managed-node2 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished shutting down.\nJul 19 12:39:58 managed-node2 platform-python[44169]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:39:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay-c944fbd43e1673bcb0e2412bde6d753cffca05c01ef505aa29441df09b37e4f0-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-c944fbd43e1673bcb0e2412bde6d753cffca05c01ef505aa29441df09b37e4f0-merged.mount has successfully entered the 
'dead' state.\nJul 19 12:39:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:39:58 managed-node2 platform-python[44294]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 19 12:39:58 managed-node2 platform-python[44294]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/httpd3.yml\nJul 19 12:39:59 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:39:59 managed-node2 platform-python[44430]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:40:00 managed-node2 platform-python[44553]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None\nJul 19 12:40:01 managed-node2 platform-python[44677]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:01 managed-node2 sudo[44802]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zjfnlsrhavffrrytzlezjdprwiglzjsy ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943201.3476377-20989-80415109164671/AnsiballZ_podman_container_info.py'\nJul 19 12:40:01 managed-node2 sudo[44802]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:40:01 managed-node2 platform-python[44805]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None\nJul 19 12:40:01 managed-node2 systemd[25528]: Started podman-44807.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:40:01 managed-node2 sudo[44802]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:40:02 managed-node2 sudo[44936]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kqzzrdenthgaimpshaehnjtpvgbskyzt ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python 
/var/tmp/ansible-tmp-1752943201.9272537-21012-43022464511697/AnsiballZ_command.py'\nJul 19 12:40:02 managed-node2 sudo[44936]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:40:02 managed-node2 platform-python[44939]: ansible-command Invoked with _raw_params=podman network ls -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:02 managed-node2 systemd[25528]: Started podman-44941.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:40:02 managed-node2 sudo[44936]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:40:02 managed-node2 sudo[45096]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wfpjnxfzctwnxksvtryonkmxvjvkhduq ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943202.41182-21042-156572471435781/AnsiballZ_command.py'\nJul 19 12:40:02 managed-node2 sudo[45096]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:40:02 managed-node2 platform-python[45099]: ansible-command Invoked with _raw_params=podman secret ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:02 managed-node2 systemd[25528]: Started podman-45101.scope.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:40:02 managed-node2 sudo[45096]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:40:03 managed-node2 platform-python[45230]: ansible-command Invoked with removes=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl disable-linger podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None\nJul 19 12:40:03 managed-node2 systemd[1]: Stopping User Manager for UID 3001...\n-- Subject: Unit user@3001.service has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user@3001.service has begun shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Stopped target Default.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Stopping D-Bus User Message Bus...\n-- Subject: Unit UNIT has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has begun shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Removed slice podman\\x2dkube.slice.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Stopping podman-pause-1458d7a0.scope.\n-- Subject: Unit UNIT has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has begun shutting down.\nJul 19 12:40:03 managed-node2 
systemd[25528]: Stopped D-Bus User Message Bus.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Stopped target Basic System.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Stopped target Timers.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Stopped Mark boot as successful after the user session has run 2 minutes.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Stopped target Sockets.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Stopped target Paths.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Closed D-Bus User Message Bus Socket.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Stopped podman-pause-1458d7a0.scope.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Removed slice user.slice.\n-- Subject: Unit UNIT has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[25528]: Reached target Shutdown.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:40:03 managed-node2 systemd[25528]: Started Exit the Session.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:40:03 managed-node2 systemd[25528]: Reached target Exit the Session.\n-- Subject: Unit UNIT has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit UNIT has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:40:03 managed-node2 systemd[1]: user@3001.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit user@3001.service has successfully entered the 'dead' state.\nJul 19 12:40:03 managed-node2 systemd[1]: Stopped User Manager for UID 3001.\n-- Subject: Unit user@3001.service has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user@3001.service has finished shutting 
down.\nJul 19 12:40:03 managed-node2 systemd[1]: Stopping User runtime directory /run/user/3001...\n-- Subject: Unit user-runtime-dir@3001.service has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-runtime-dir@3001.service has begun shutting down.\nJul 19 12:40:03 managed-node2 systemd[1]: run-user-3001.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-user-3001.mount has successfully entered the 'dead' state.\nJul 19 12:40:03 managed-node2 systemd[1]: user-runtime-dir@3001.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit user-runtime-dir@3001.service has successfully entered the 'dead' state.\nJul 19 12:40:03 managed-node2 systemd[1]: Stopped User runtime directory /run/user/3001.\n-- Subject: Unit user-runtime-dir@3001.service has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-runtime-dir@3001.service has finished shutting down.\nJul 19 12:40:03 managed-node2 systemd[1]: Removed slice User Slice of UID 3001.\n-- Subject: Unit user-3001.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit user-3001.slice has finished shutting down.\nJul 19 12:40:03 managed-node2 platform-python[45362]: ansible-command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:04 managed-node2 sudo[45486]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dlpvabpxjyeqjdweosdqhelprmnnraei ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943204.1405673-21146-194288526342265/AnsiballZ_command.py'\nJul 19 12:40:04 managed-node2 sudo[45486]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:40:04 managed-node2 platform-python[45489]: ansible-command Invoked with _raw_params=podman pod exists httpd1 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:04 managed-node2 sudo[45486]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:40:04 managed-node2 platform-python[45619]: ansible-command Invoked with _raw_params=podman pod exists httpd2 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:05 managed-node2 platform-python[45749]: ansible-command Invoked with _raw_params=podman pod exists httpd3 warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:05 managed-node2 sudo[45879]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uhnqljkrbofbppqpfvsxbbfqaxcbzbnz ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1752943205.3022838-21198-91872805319796/AnsiballZ_command.py'\nJul 19 12:40:05 managed-node2 sudo[45879]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)\nJul 19 12:40:05 managed-node2 platform-python[45882]: 
ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:05 managed-node2 sudo[45879]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 19 12:40:05 managed-node2 platform-python[46008]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:06 managed-node2 platform-python[46134]: ansible-command Invoked with _raw_params= _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:06 managed-node2 platform-python[46260]: ansible-stat Invoked with path=/var/lib/systemd/linger/podman_basic_user follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:09 managed-node2 platform-python[46508]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:10 managed-node2 platform-python[46637]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 19 12:40:11 managed-node2 platform-python[46761]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:13 managed-node2 platform-python[46886]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 19 12:40:14 managed-node2 platform-python[47010]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:14 managed-node2 platform-python[47135]: ansible-command Invoked with _raw_params=getsubids podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:15 managed-node2 platform-python[47259]: ansible-command Invoked with _raw_params=getsubids -g podman_basic_user warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:16 managed-node2 platform-python[47383]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:16 managed-node2 platform-python[47507]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:17 managed-node2 platform-python[47630]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:17 managed-node2 platform-python[47753]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S 
unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:40:19 managed-node2 platform-python[47876]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 19 12:40:19 managed-node2 platform-python[48000]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:20 managed-node2 platform-python[48125]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:21 managed-node2 platform-python[48249]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 19 12:40:22 managed-node2 platform-python[48376]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:22 managed-node2 platform-python[48499]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:40:23 managed-node2 platform-python[48622]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:25 managed-node2 platform-python[48747]: ansible-command Invoked with _raw_params= warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:25 managed-node2 platform-python[48871]: ansible-systemd Invoked with name= scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None\nJul 19 12:40:26 managed-node2 platform-python[48998]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:26 managed-node2 platform-python[49121]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:40:28 managed-node2 platform-python[49244]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True 
service=None split=None\nJul 19 12:40:28 managed-node2 platform-python[49368]: ansible-stat Invoked with path=/run/user/3001 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:29 managed-node2 platform-python[49491]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:40:29 managed-node2 platform-python[49614]: ansible-file Invoked with path=/tmp/lsr_s72m6vdk_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:40:32 managed-node2 platform-python[49776]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d\nJul 19 12:40:33 managed-node2 platform-python[49903]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:33 managed-node2 platform-python[50026]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:36 managed-node2 platform-python[50274]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:37 managed-node2 platform-python[50403]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 19 12:40:37 managed-node2 platform-python[50527]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:41 managed-node2 platform-python[50691]: ansible-setup Invoked with gather_subset=['all'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d\nJul 19 12:40:44 managed-node2 platform-python[50843]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:45 managed-node2 platform-python[50966]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:47 managed-node2 platform-python[51214]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:48 managed-node2 platform-python[51343]: ansible-getent Invoked with 
database=passwd key=root fail_key=False service=None split=None\nJul 19 12:40:48 managed-node2 platform-python[51467]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:54 managed-node2 platform-python[51631]: ansible-setup Invoked with gather_subset=['all'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d\nJul 19 12:40:54 managed-node2 platform-python[51783]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:40:55 managed-node2 platform-python[51906]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:40:56 managed-node2 platform-python[52030]: ansible-dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:40:59 managed-node2 platform-python[52158]: ansible-dnf Invoked with name=['certmonger'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:41:02 managed-node2 dbus-daemon[591]: [system] Reloaded configuration\nJul 19 12:41:02 managed-node2 dbus-daemon[591]: [system] Reloaded configuration\nJul 19 12:41:02 managed-node2 dbus-daemon[591]: [system] Reloaded configuration\nJul 19 12:41:02 managed-node2 dbus-daemon[591]: [system] Reloaded configuration\nJul 19 12:41:02 managed-node2 dbus-daemon[591]: [system] Reloaded configuration\nJul 19 12:41:02 managed-node2 systemd[1]: Reloading.\nJul 19 12:41:02 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.\n-- Subject: Unit run-r2a2b61b169e54534b6bc9888468488f1.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit run-r2a2b61b169e54534b6bc9888468488f1.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:41:02 managed-node2 systemd[1]: Starting man-db-cache-update.service...\n-- Subject: Unit man-db-cache-update.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit man-db-cache-update.service has begun starting up.\nJul 19 12:41:02 managed-node2 systemd[1]: Reloading.\nJul 19 12:41:03 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit man-db-cache-update.service has successfully entered the 'dead' state.\nJul 19 12:41:03 managed-node2 systemd[1]: Started man-db-cache-update.service.\n-- 
Subject: Unit man-db-cache-update.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit man-db-cache-update.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:41:03 managed-node2 systemd[1]: run-r2a2b61b169e54534b6bc9888468488f1.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-r2a2b61b169e54534b6bc9888468488f1.service has successfully entered the 'dead' state.\nJul 19 12:41:04 managed-node2 platform-python[52790]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:41:04 managed-node2 platform-python[52913]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:41:05 managed-node2 platform-python[53036]: ansible-systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 19 12:41:05 managed-node2 systemd[1]: Reloading.\nJul 19 12:41:05 managed-node2 systemd[1]: Starting dnf makecache...\n-- Subject: Unit dnf-makecache.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit dnf-makecache.service has begun starting up.\nJul 19 12:41:05 managed-node2 systemd[1]: Starting Certificate monitoring and PKI enrollment...\n-- Subject: Unit certmonger.service has begun start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit certmonger.service has begun starting up.\nJul 19 12:41:05 managed-node2 systemd[1]: Started Certificate monitoring and PKI enrollment.\n-- Subject: Unit certmonger.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit certmonger.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:41:05 managed-node2 dnf[53070]: Failed determining last makecache time.\nJul 19 12:41:05 managed-node2 dnf[53070]: CentOS Stream 8 - AppStream 124 kB/s | 4.4 kB 00:00\nJul 19 12:41:05 managed-node2 dnf[53070]: CentOS Stream 8 - BaseOS 100 kB/s | 3.9 kB 00:00\nJul 19 12:41:06 managed-node2 dnf[53070]: CentOS Stream 8 - Extras 83 kB/s | 2.9 kB 00:00\nJul 19 12:41:06 managed-node2 platform-python[53233]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=#\n # Ansible managed\n #\n # system_role:certificate\n booted=True provider_config_directory=/etc/certmonger 
provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None\nJul 19 12:41:06 managed-node2 dnf[53070]: CentOS Stream 8 - Extras common packages 74 kB/s | 3.0 kB 00:00\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 dnf[53070]: CentOS Stream 8 - HighAvailability 36 kB/s | 3.9 kB 00:00\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to 
/var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 certmonger[53258]: Certificate in file \"/etc/pki/tls/certs/quadlet_demo.crt\" issued by CA and saved.\nJul 19 12:41:06 managed-node2 certmonger[53073]: 2025-07-19 12:41:06 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:06 managed-node2 dnf[53070]: Beaker Client - RedHatEnterpriseLinux8 8.8 kB/s | 1.5 kB 00:00\nJul 19 12:41:06 managed-node2 dnf[53070]: Beaker harness 13 kB/s | 1.3 kB 00:00\nJul 19 12:41:06 managed-node2 dnf[53070]: Copr repo for beakerlib-libraries owned by bgon 13 kB/s | 1.8 kB 00:00\nJul 19 12:41:06 managed-node2 dnf[53070]: Extra Packages for Enterprise Linux 8 - x86_64 413 kB/s | 35 kB 00:00\nJul 19 12:41:06 managed-node2 platform-python[53383]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt\nJul 19 12:41:07 managed-node2 dnf[53070]: Extra Packages for Enterprise Linux 8 - x86_64 281 kB/s | 21 kB 00:00\nJul 19 12:41:07 managed-node2 platform-python[53508]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key\nJul 19 12:41:07 managed-node2 dnf[53070]: Extra Packages for Enterprise Linux 8 - x86_64 81 kB/s | 25 kB 00:00\nJul 19 12:41:07 managed-node2 dnf[53070]: Copr repo for qa-tools owned by lpol 33 kB/s | 1.8 kB 00:00\nJul 19 12:41:07 managed-node2 platform-python[53632]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt\nJul 19 12:41:08 managed-node2 platform-python[53755]: ansible-command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:41:08 managed-node2 certmonger[53073]: 2025-07-19 12:41:08 [53073] Wrote to /var/lib/certmonger/requests/20250719164106\nJul 19 12:41:08 managed-node2 platform-python[53879]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:41:09 managed-node2 platform-python[54002]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None 
remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:41:09 managed-node2 platform-python[54125]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None\nJul 19 12:41:09 managed-node2 dnf[53070]: Metadata cache created.\nJul 19 12:41:10 managed-node2 systemd[1]: dnf-makecache.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit dnf-makecache.service has successfully entered the 'dead' state.\nJul 19 12:41:10 managed-node2 systemd[1]: Started dnf makecache.\n-- Subject: Unit dnf-makecache.service has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit dnf-makecache.service has finished starting up.\n-- \n-- The start-up result is done.\nJul 19 12:41:10 managed-node2 platform-python[54249]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:41:10 managed-node2 platform-python[54372]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:41:13 managed-node2 platform-python[54620]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:41:14 managed-node2 platform-python[54749]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 19 12:41:14 managed-node2 platform-python[54873]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:41:16 managed-node2 platform-python[54998]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:41:17 managed-node2 platform-python[55121]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:41:17 managed-node2 platform-python[55244]: ansible-command Invoked with _raw_params=systemctl is-system-running warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:41:18 managed-node2 platform-python[55368]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:41:21 managed-node2 
platform-python[55491]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 19 12:41:21 managed-node2 platform-python[55618]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 19 12:41:22 managed-node2 platform-python[55745]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 19 12:41:23 managed-node2 platform-python[55868]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 19 12:41:25 managed-node2 platform-python[55991]: ansible-command Invoked with _raw_params=journalctl -ex warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:41:25 managed-node2 platform-python[56115]: ansible-command Invoked with _raw_params=podman ps -a warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:41:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\\x2dcheck670583346-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-metacopy\\x2dcheck670583346-merged.mount has successfully entered the 'dead' state.\nJul 19 12:41:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 19 12:41:26 managed-node2 platform-python[56245]: ansible-command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:41:26 managed-node2 platform-python[56375]: ansible-command Invoked with _raw_params=set -euo pipefail; systemctl list-units --all | grep quadlet _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:41:26 managed-node2 platform-python[56501]: ansible-command Invoked with _raw_params=ls -alrtF /etc/systemd/system warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None 
executable=None creates=None removes=None stdin=None\nJul 19 12:41:29 managed-node2 platform-python[56750]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:41:30 managed-node2 platform-python[56879]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 19 12:41:32 managed-node2 platform-python[57004]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 19 12:41:35 managed-node2 platform-python[57127]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None\nJul 19 12:41:35 managed-node2 platform-python[57254]: ansible-systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None\nJul 19 12:41:36 managed-node2 platform-python[57381]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 19 12:41:36 managed-node2 platform-python[57504]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 19 12:41:38 managed-node2 platform-python[57627]: ansible-command Invoked with _raw_params=exec 1>&2\n set -x\n set -o pipefail\n systemctl list-units --plain -l --all | grep quadlet || :\n systemctl list-unit-files --all | grep quadlet || :\n systemctl list-units --plain --failed -l --all | grep quadlet || :\n _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 19 12:41:39 managed-node2 platform-python[57757]: ansible-command Invoked with _raw_params=journalctl -ex warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None", "task_name": "Get journald", "task_path": "/tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:209" } ] SYSTEM ROLES ERRORS END v1 TASKS RECAP 
******************************************************************** Saturday 19 July 2025 12:41:39 -0400 (0:00:00.393) 0:00:45.920 ********* =============================================================================== fedora.linux_system_roles.certificate : Ensure provider packages are installed --- 4.40s /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:15 fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed --- 3.62s /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5 fedora.linux_system_roles.firewall : Install firewalld ------------------ 2.92s /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:51 fedora.linux_system_roles.firewall : Install firewalld ------------------ 2.81s /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:51 fedora.linux_system_roles.firewall : Configure firewall ----------------- 2.42s /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:74 fedora.linux_system_roles.podman : Gather the package facts ------------- 1.83s /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 fedora.linux_system_roles.certificate : Remove files -------------------- 1.47s /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:174 fedora.linux_system_roles.podman : Gather the package facts ------------- 1.46s /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 fedora.linux_system_roles.certificate : Slurp the contents of the files --- 1.40s /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:143 Gathering Facts --------------------------------------------------------- 1.06s /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9 fedora.linux_system_roles.firewall : Unmask firewalld service ----------- 1.03s /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:24 fedora.linux_system_roles.certificate : Ensure provider service is running --- 1.00s /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:76 fedora.linux_system_roles.firewall : Enable and start firewalld service --- 0.99s /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:30 fedora.linux_system_roles.certificate : Ensure certificate requests ----- 0.95s /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:86 Debug ------------------------------------------------------------------- 0.72s /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:199 fedora.linux_system_roles.certificate : Stop tracking certificates ------ 0.55s /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:169 fedora.linux_system_roles.podman : Get user information ----------------- 0.54s /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Dump journal ------------------------------------------------------------ 0.49s 
/tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:142 fedora.linux_system_roles.certificate : Ensure pre-scripts hooks directory exists --- 0.48s /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:25 fedora.linux_system_roles.certificate : Check if system is ostree ------- 0.45s /tmp/collections-Qru/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:10