ansible-playbook [core 2.17.12]
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
ansible collection location = /tmp/collections-AV4
executable location = /usr/local/bin/ansible-playbook
python version = 3.12.11 (main, Jun 4 2025, 00:00:00) [GCC 11.5.0 20240719 (Red Hat 11.5.0-7)] (/usr/bin/python3.12)
jinja version = 3.1.6
libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'jsonl', as we already have a stdout callback.
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
PLAYBOOK: tests_quadlet_demo.yml ***********************************************
2 plays in /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml
PLAY [all] *********************************************************************
TASK [Include vault variables] *************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:5
Monday 07 July 2025 20:15:38 -0400 (0:00:00.019) 0:00:00.019 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_test_password": {
"__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n"
},
"mysql_container_root_password": {
"__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n"
}
},
"ansible_included_var_files": [
"/tmp/podman-b9i/tests/vars/vault-variables.yml"
],
"changed": false
}
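Note: the vault-encrypted facts above are loaded from vars/vault-variables.yml by an include_vars task; the sketch below shows that general pattern, not the test's literal task, and the example secret value in the comment is assumed (the variable name __podman_test_password is the one shown above).
    # minimal sketch of loading vault-encrypted variables
    - name: Include vault variables
      ansible.builtin.include_vars:
        file: vars/vault-variables.yml
    # values in such a file are typically produced with, e.g.:
    #   ansible-vault encrypt_string 'example-password' --name '__podman_test_password'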
PLAY [Deploy the quadlet demo app] *********************************************
TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9
Monday 07 July 2025 20:15:38 -0400 (0:00:00.038) 0:00:00.057 ***********
[WARNING]: Platform linux on host managed-node1 is using the discovered Python
interpreter at /usr/bin/python3.9, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
ok: [managed-node1]
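Note: the interpreter-discovery warning above can be avoided by pinning the interpreter explicitly; a minimal YAML inventory sketch, using the host name and interpreter path reported in this run (everything else assumed):
    all:
      hosts:
        managed-node1:
          ansible_python_interpreter: /usr/bin/python3.9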
TASK [Test is only supported on x86_64] ****************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:38
Monday 07 July 2025 20:15:39 -0400 (0:00:01.034) 0:00:01.092 ***********
skipping: [managed-node1] => {
"false_condition": "ansible_facts[\"architecture\"] != \"x86_64\""
}
TASK [End test] ****************************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:45
Monday 07 July 2025 20:15:39 -0400 (0:00:00.013) 0:00:01.106 ***********
META: end_play conditional evaluated to False, continuing play
skipping: [managed-node1] => {
"skip_reason": "end_play conditional evaluated to False, continuing play"
}
MSG:
end_play
TASK [Generate certificates] ***************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:51
Monday 07 July 2025 20:15:39 -0400 (0:00:00.008) 0:00:01.114 ***********
included: fedora.linux_system_roles.certificate for managed-node1
TASK [fedora.linux_system_roles.certificate : Set version specific variables] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:2
Monday 07 July 2025 20:15:39 -0400 (0:00:00.039) 0:00:01.153 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml for managed-node1
TASK [fedora.linux_system_roles.certificate : Ensure ansible_facts used by role] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:2
Monday 07 July 2025 20:15:39 -0400 (0:00:00.024) 0:00:01.177 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__certificate_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.certificate : Check if system is ostree] *******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:10
Monday 07 July 2025 20:15:39 -0400 (0:00:00.036) 0:00:01.213 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.certificate : Set flag to indicate system is ostree] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:15
Monday 07 July 2025 20:15:40 -0400 (0:00:00.429) 0:00:01.643 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__certificate_is_ostree": false
},
"changed": false
}
TASK [fedora.linux_system_roles.certificate : Run systemctl] *******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:22
Monday 07 July 2025 20:15:40 -0400 (0:00:00.023) 0:00:01.666 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"systemctl",
"is-system-running"
],
"delta": "0:00:00.007955",
"end": "2025-07-07 20:15:40.776584",
"failed_when_result": false,
"rc": 0,
"start": "2025-07-07 20:15:40.768629"
}
STDOUT:
running
TASK [fedora.linux_system_roles.certificate : Require installed systemd] *******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:30
Monday 07 July 2025 20:15:40 -0400 (0:00:00.436) 0:00:02.103 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "\"No such file or directory\" in __is_system_running.msg | d(\"\")",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.certificate : Set flag to indicate that systemd runtime operations are available] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:35
Monday 07 July 2025 20:15:40 -0400 (0:00:00.036) 0:00:02.140 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__certificate_is_booted": true
},
"changed": false
}
TASK [fedora.linux_system_roles.certificate : Set platform/version specific variables] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:40
Monday 07 July 2025 20:15:40 -0400 (0:00:00.024) 0:00:02.164 ***********
skipping: [managed-node1] => (item=RedHat.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "RedHat.yml",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=CentOS.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "CentOS.yml",
"skip_reason": "Conditional result was False"
}
ok: [managed-node1] => (item=CentOS_9.yml) => {
"ansible_facts": {
"__certificate_certmonger_packages": [
"certmonger",
"python3-packaging"
]
},
"ansible_included_var_files": [
"/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_9.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS_9.yml"
}
ok: [managed-node1] => (item=CentOS_9.yml) => {
"ansible_facts": {
"__certificate_certmonger_packages": [
"certmonger",
"python3-packaging"
]
},
"ansible_included_var_files": [
"/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_9.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS_9.yml"
}
TASK [fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5
Monday 07 July 2025 20:15:40 -0400 (0:00:00.040) 0:00:02.205 ***********
ok: [managed-node1] => {
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
TASK [fedora.linux_system_roles.certificate : Ensure provider packages are installed] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:15
Monday 07 July 2025 20:15:42 -0400 (0:00:01.421) 0:00:03.626 ***********
ok: [managed-node1] => (item=certmonger) => {
"__certificate_provider": "certmonger",
"ansible_loop_var": "__certificate_provider",
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
TASK [fedora.linux_system_roles.certificate : Ensure pre-scripts hooks directory exists] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:25
Monday 07 July 2025 20:15:43 -0400 (0:00:01.327) 0:00:04.954 ***********
ok: [managed-node1] => (item=certmonger) => {
"__certificate_provider": "certmonger",
"ansible_loop_var": "__certificate_provider",
"changed": false,
"gid": 0,
"group": "root",
"mode": "0700",
"owner": "root",
"path": "/etc/certmonger//pre-scripts",
"secontext": "unconfined_u:object_r:etc_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.certificate : Ensure post-scripts hooks directory exists] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:49
Monday 07 July 2025 20:15:44 -0400 (0:00:00.574) 0:00:05.529 ***********
ok: [managed-node1] => (item=certmonger) => {
"__certificate_provider": "certmonger",
"ansible_loop_var": "__certificate_provider",
"changed": false,
"gid": 0,
"group": "root",
"mode": "0700",
"owner": "root",
"path": "/etc/certmonger//post-scripts",
"secontext": "unconfined_u:object_r:etc_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.certificate : Ensure provider service is running] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:76
Monday 07 July 2025 20:15:44 -0400 (0:00:00.445) 0:00:05.974 ***********
ok: [managed-node1] => (item=certmonger) => {
"__certificate_provider": "certmonger",
"ansible_loop_var": "__certificate_provider",
"changed": false,
"enabled": true,
"name": "certmonger",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:certmonger_unit_file_t:s0",
"ActiveEnterTimestamp": "Mon 2025-07-07 20:11:06 EDT",
"ActiveEnterTimestampMonotonic": "494646902",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "sysinit.target basic.target systemd-journald.socket syslog.target dbus-broker.service dbus.socket system.slice network.target",
"AllowIsolate": "no",
"AssertResult": "yes",
"AssertTimestamp": "Mon 2025-07-07 20:11:06 EDT",
"AssertTimestampMonotonic": "494625097",
"Before": "multi-user.target shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedorahosted.certmonger",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "407031000",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Mon 2025-07-07 20:11:06 EDT",
"ConditionTimestampMonotonic": "494625095",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroup": "/system.slice/certmonger.service",
"ControlGroupId": "4409",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "Certificate monitoring and PKI enrollment",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveCPUs": "0-1",
"EffectiveMemoryNodes": "0",
"EnvironmentFiles": "/etc/sysconfig/certmonger (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "12020",
"ExecMainStartTimestamp": "Mon 2025-07-07 20:11:06 EDT",
"ExecMainStartTimestampMonotonic": "494639128",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FinalKillSignal": "9",
"FragmentPath": "/usr/lib/systemd/system/certmonger.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "18446744073709551615",
"IOReadOperations": "18446744073709551615",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "18446744073709551615",
"IOWriteOperations": "18446744073709551615",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "certmonger.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Mon 2025-07-07 20:11:06 EDT",
"InactiveExitTimestampMonotonic": "494639522",
"InvocationID": "26ef44a06d534a7189bbceb9f6bb285c",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13688",
"LimitNPROCSoft": "13688",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13688",
"LimitSIGPENDINGSoft": "13688",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "12020",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "infinity",
"MemoryCurrent": "4030464",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "certmonger.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"PIDFile": "/run/certmonger.pid",
"PartOf": "dbus-broker.service",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "dbus.socket sysinit.target system.slice",
"Restart": "no",
"RestartKillSignal": "15",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestamp": "Mon 2025-07-07 20:13:12 EDT",
"StateChangeTimestampMonotonic": "620867739",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "1",
"TasksMax": "21900",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
TASK [fedora.linux_system_roles.certificate : Ensure certificate requests] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:86
Monday 07 July 2025 20:15:45 -0400 (0:00:00.794) 0:00:06.769 ***********
changed: [managed-node1] => (item={'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}) => {
"ansible_loop_var": "item",
"changed": true,
"item": {
"ca": "self-sign",
"dns": [
"localhost"
],
"name": "quadlet_demo"
}
}
MSG:
Certificate requested (new).
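Note: the loop item shown above corresponds to certificate role input of roughly the following shape; certificate_requests is the role's input variable and the values are taken from the item, but this is a sketch rather than the test's literal vars:
    certificate_requests:
      - name: quadlet_demo
        dns:
          - localhost
        ca: self-sign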
TASK [fedora.linux_system_roles.certificate : Check if test mode is supported] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:138
Monday 07 July 2025 20:15:46 -0400 (0:00:00.971) 0:00:07.740 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __certificate_is_booted",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.certificate : Slurp the contents of the files] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:143
Monday 07 July 2025 20:15:46 -0400 (0:00:00.052) 0:00:07.793 ***********
ok: [managed-node1] => (item=['cert', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => {
"ansible_loop_var": "item",
"changed": false,
"content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnekNDQW11Z0F3SUJBZ0lSQUl6RldhQUlra05ocWhyeGRzUFFjeTh3RFFZSktvWklodmNOQVFFTEJRQXcKVURFZ01CNEdBMVVFQXd3WFRHOWpZV3dnVTJsbmJtbHVaeUJCZFhSb2IzSnBkSGt4TERBcUJnTlZCQU1NSXpoagpZelUxT1dFd0xUQTRPVEkwTXpZeExXRmhNV0ZtTVRjMkxXTXpaREEzTXpKa01CNFhEVEkxTURjd09EQXdNVFUwCk5sb1hEVEkyTURjd09EQXdNVEV3Tmxvd0ZERVNNQkFHQTFVRUF4TUpiRzlqWVd4b2IzTjBNSUlCSWpBTkJna3EKaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUF2TUo0WEYxeUluL2tjY1BXOGJRbEEwdkY5d1VRcCtiMAphbytkZWJqc1lNeTU3czJzeWkvYTNZYmtzbWowcStHbFI4QndVZjI3dEFZbHVHM3hvY0ZSdTZVOW01aFR1c0xBCmUrL3N3bmQwalRSUjJ6Qm9wMDg3ZWxDT3kvbklJOWhIMk1oV296QndURmVnN3FOaVBwL3ZTS0tGZVczV0VGd2MKV1M1elk0NVhTZTBVU0hEZmh6VXR1aXBCUmVOQUQ1R21DbHFVbmp4bExqZlp6SmNCNTdIMVoxN0JtSmQzTnR3dQpMcEpUaTd4Z3Iza1M3azBzT3NJRUV5bU54MXkrSUFEdFdtS3MraXZWd3ZKYXdTMzZSYk9NY1ZLUjJ3TldHRGFvCms3YVBaSytCM1JGNGJTbzRyQmk1Q1ZjN2tPS0EyY25KSDFNY0V3eVJJcUtzSXV3SVBjeklQUUlEQVFBQm80R1QKTUlHUU1Bc0dBMVVkRHdRRUF3SUZvREFVQmdOVkhSRUVEVEFMZ2dsc2IyTmhiR2h2YzNRd0hRWURWUjBsQkJZdwpGQVlJS3dZQkJRVUhBd0VHQ0NzR0FRVUZCd01DTUF3R0ExVWRFd0VCL3dRQ01BQXdIUVlEVlIwT0JCWUVGUDdpCkdHOCttSmN4NW5HM08yUEFrdG83aW1mNE1COEdBMVVkSXdRWU1CYUFGUDlrazRwdWl1TTNhVkw2Vk5SMEsyZGIKUWdTcU1BMEdDU3FHU0liM0RRRUJDd1VBQTRJQkFRQlROSWhSeXFzY0QzWUJsQUg0WS9od1NOcWZBeHNOTTM0ZApTc01jY2ZNQnBFS3oxUkkrMlRPNWNoeExvV0ZvSDJXTlZUejZrTlJNSEVBRDlsaWFRdlp4NHJ3NmpNYVJNVURsClBKS1dTcGZoODE0bWNVU3hqMjJPTjl2S1dGL0xrNUJsS1YrcTRaN1BLSzE1TmV6M3VVZFRidWpVdWJjQ2NSRnQKOFdneWt5NERXdklYZURzbG9qNlNnSndrNVIzak5qU0xYUkEzclBNS0JWRmVMbzltR3VBd0tIdi96NXM2OHl6cQo0aVJ4K3BNVTVWazJPYVlZaVdpS1hIYkEvLzFVbmtvSFFDQnFnV1hlSFdEZ3EyZm9LVzhsVXZnMGdEaE0zVmE2CnUrV1JmS1ExOUtQUFJIR1pFQTZOZWdWWXZrT1VXZWIzYTYxSTdOS3N5OTNCNHFuQzA1dS8KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=",
"encoding": "base64",
"item": [
"cert",
{
"ca": "self-sign",
"dns": [
"localhost"
],
"name": "quadlet_demo"
}
],
"source": "/etc/pki/tls/certs/quadlet_demo.crt"
}
ok: [managed-node1] => (item=['key', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => {
"ansible_loop_var": "item",
"changed": false,
"content": "LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2UUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktjd2dnU2pBZ0VBQW9JQkFRQzh3bmhjWFhJaWYrUngKdzlieHRDVURTOFgzQlJDbjV2UnFqNTE1dU94Z3pMbnV6YXpLTDlyZGh1U3lhUFNyNGFWSHdIQlIvYnUwQmlXNApiZkdod1ZHN3BUMmJtRk82d3NCNzcrekNkM1NOTkZIYk1HaW5UenQ2VUk3TCtjZ2oyRWZZeUZhak1IQk1WNkR1Cm8ySStuKzlJb29WNWJkWVFYQnhaTG5OampsZEo3UlJJY04rSE5TMjZLa0ZGNDBBUGthWUtXcFNlUEdVdU45bk0KbHdIbnNmVm5Yc0dZbDNjMjNDNHVrbE9MdkdDdmVSTHVUU3c2d2dRVEtZM0hYTDRnQU8xYVlxejZLOVhDOGxyQgpMZnBGczR4eFVwSGJBMVlZTnFpVHRvOWtyNEhkRVhodEtqaXNHTGtKVnp1UTRvRFp5Y2tmVXh3VERKRWlvcXdpCjdBZzl6TWc5QWdNQkFBRUNnZ0VBRjZlRUowa0Rqa3k2SDBLVU8wNWdyQWpSR2x0Tllza1h2SW5hU1dCeDZOQ0gKQldUTFlEditnSy9RbFlYL0VWSVI0UXU5MTV5Rkt2SnhSQ3NzOG1FVVFjQmJsOVhQT2xLQkJoY0RNN3grQTlyNApYaEF6TVRKRjZSWlN5cjkydy8wc3hiOFhVbFIxNGk5Q0NOMjdLRTlwRUZCaml4dkhUVjRKcEdoa2grZVV5enZkCnZnaVkwWDNROW9naTJ5bnFIcWgwTDB1QmdVZGE4UWVJd0xYVWtkZVdEa0l4Wm4rdDl5R2hTc1JxdG5lNC96NGkKbFpYay84RW1HR04xQXhJb0o3WXNGakhjMjFiamRKTnJtMVhwRzJ0c0pZdUhpQzNxb3paK2dnMlpPYldQbGkxZgpYZTlxRWczRFlpblg0RFhPY2owbmZ1anhqdTZhcWczUCt4M0swU0lIWVFLQmdRRDVFc3RSSHg3bnBKUktUVU9KCklDVEdZRGMzNGYwNmFzR1l3b2Y0YTBtSGZNcjVpQ096U0ozSnNGaTZ3eHZkY0ljbkp5d2JaT0pWN2tLcWFoWmcKQ2o2SmRsSkxFVVhzb0M4MzhsRFF2djZuRUo5R3JWZTZkUE8vTzFSSUNSUlAwNmdzVDNnVlEvclFrdnA0S001dwozdkVTQXluZVllUThybFpwaFY2djI1L1ozUUtCZ1FEQ0Frb2szZERiZGZDeENaS1BNcHkzNWU4UVA1RWgyaW02CloyZlhxVllNcHVPdSt5N3EzSU1naVJ6ZGt2SFd5d3dFUFlCNlNrMEFkVHBpQk94T3JNM1VGbUFwdzdBVHIzVG0KRkRJUTdnT3J4RkZLSExFSGxqcFBKL3pPN1R4NUYyNklYaW9WbFZYcXBZaGMwaXZQVkdmV2JWY1dONy9CZmxQeApXdEJ2YWRjeDRRS0JnUURBZUFCN0FGUnVaaVJkUWxWazJsendtcHN3Nk1ZNDVSUVRDb2Z0dEdKT2UyV1I2OXpDCmh1Ty9YakZDcU5DWWpvTXlBemczTnY4YnJoRUFCUmlqUUdkUTdBanRqeng4Zk0vZ0tMVEFpa2JZTEtVWUJZMWMKUFZHNXZZd0VXbGZDMCtrSnpLTWJzN3RXZjYxak93MUQ5Vk5PT3ZtWXFnYW55WVJtNkRQaUt4SGNWUUtCZ0NxMwprbGUzdDJyNjl2b2tIMjhFTjltTitTdGNzN24rYlQycjB3TjdONFVsb2ZBL1FqOVlPazhKb1o5djRXdm4zVWQ1CnV6WXg4ZTV0NFdML3JvQk1NQzNJbDNmYXJJUkpGN1lwTURUbUUwell0d1ZZZCtveHU3aitGdjRBNGxYUFFQa3AKc3A1NStTYm5ON1VvV1prVy9wNEtDU2pOTVoyTEVUU05TR09mckZCQkFvR0FWYkJ1S01YdGM3eWxNS2xGK0svTgpvcGhXaEtHZHF2bnRabUtiTkl0TGpJWGlQWEJ4RlRBWnFzUjhtVHp3Y2ZrRitPc3R4RGplM1I5M0dVNzJaZnZlClExTHpUMmhRTkhLa0hkbjJjb2NhbnhUalY2b2cwaElwbGdINEpLK2NBM3BkVW5pWmZqTHc4TDBIY3cyZHB5NEwKZUVnTHZTMXJ4bXpVVmVHMFIyaGV5bXM9Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K",
"encoding": "base64",
"item": [
"key",
{
"ca": "self-sign",
"dns": [
"localhost"
],
"name": "quadlet_demo"
}
],
"source": "/etc/pki/tls/private/quadlet_demo.key"
}
ok: [managed-node1] => (item=['ca', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => {
"ansible_loop_var": "item",
"changed": false,
"content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnekNDQW11Z0F3SUJBZ0lSQUl6RldhQUlra05ocWhyeGRzUFFjeTh3RFFZSktvWklodmNOQVFFTEJRQXcKVURFZ01CNEdBMVVFQXd3WFRHOWpZV3dnVTJsbmJtbHVaeUJCZFhSb2IzSnBkSGt4TERBcUJnTlZCQU1NSXpoagpZelUxT1dFd0xUQTRPVEkwTXpZeExXRmhNV0ZtTVRjMkxXTXpaREEzTXpKa01CNFhEVEkxTURjd09EQXdNVFUwCk5sb1hEVEkyTURjd09EQXdNVEV3Tmxvd0ZERVNNQkFHQTFVRUF4TUpiRzlqWVd4b2IzTjBNSUlCSWpBTkJna3EKaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUF2TUo0WEYxeUluL2tjY1BXOGJRbEEwdkY5d1VRcCtiMAphbytkZWJqc1lNeTU3czJzeWkvYTNZYmtzbWowcStHbFI4QndVZjI3dEFZbHVHM3hvY0ZSdTZVOW01aFR1c0xBCmUrL3N3bmQwalRSUjJ6Qm9wMDg3ZWxDT3kvbklJOWhIMk1oV296QndURmVnN3FOaVBwL3ZTS0tGZVczV0VGd2MKV1M1elk0NVhTZTBVU0hEZmh6VXR1aXBCUmVOQUQ1R21DbHFVbmp4bExqZlp6SmNCNTdIMVoxN0JtSmQzTnR3dQpMcEpUaTd4Z3Iza1M3azBzT3NJRUV5bU54MXkrSUFEdFdtS3MraXZWd3ZKYXdTMzZSYk9NY1ZLUjJ3TldHRGFvCms3YVBaSytCM1JGNGJTbzRyQmk1Q1ZjN2tPS0EyY25KSDFNY0V3eVJJcUtzSXV3SVBjeklQUUlEQVFBQm80R1QKTUlHUU1Bc0dBMVVkRHdRRUF3SUZvREFVQmdOVkhSRUVEVEFMZ2dsc2IyTmhiR2h2YzNRd0hRWURWUjBsQkJZdwpGQVlJS3dZQkJRVUhBd0VHQ0NzR0FRVUZCd01DTUF3R0ExVWRFd0VCL3dRQ01BQXdIUVlEVlIwT0JCWUVGUDdpCkdHOCttSmN4NW5HM08yUEFrdG83aW1mNE1COEdBMVVkSXdRWU1CYUFGUDlrazRwdWl1TTNhVkw2Vk5SMEsyZGIKUWdTcU1BMEdDU3FHU0liM0RRRUJDd1VBQTRJQkFRQlROSWhSeXFzY0QzWUJsQUg0WS9od1NOcWZBeHNOTTM0ZApTc01jY2ZNQnBFS3oxUkkrMlRPNWNoeExvV0ZvSDJXTlZUejZrTlJNSEVBRDlsaWFRdlp4NHJ3NmpNYVJNVURsClBKS1dTcGZoODE0bWNVU3hqMjJPTjl2S1dGL0xrNUJsS1YrcTRaN1BLSzE1TmV6M3VVZFRidWpVdWJjQ2NSRnQKOFdneWt5NERXdklYZURzbG9qNlNnSndrNVIzak5qU0xYUkEzclBNS0JWRmVMbzltR3VBd0tIdi96NXM2OHl6cQo0aVJ4K3BNVTVWazJPYVlZaVdpS1hIYkEvLzFVbmtvSFFDQnFnV1hlSFdEZ3EyZm9LVzhsVXZnMGdEaE0zVmE2CnUrV1JmS1ExOUtQUFJIR1pFQTZOZWdWWXZrT1VXZWIzYTYxSTdOS3N5OTNCNHFuQzA1dS8KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=",
"encoding": "base64",
"item": [
"ca",
{
"ca": "self-sign",
"dns": [
"localhost"
],
"name": "quadlet_demo"
}
],
"source": "/etc/pki/tls/certs/quadlet_demo.crt"
}
TASK [fedora.linux_system_roles.certificate : Reset certificate_test_certs] ****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:151
Monday 07 July 2025 20:15:47 -0400 (0:00:01.177) 0:00:08.970 ***********
ok: [managed-node1] => {
"ansible_facts": {
"certificate_test_certs": {}
},
"changed": false
}
TASK [fedora.linux_system_roles.certificate : Create return data] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:155
Monday 07 July 2025 20:15:47 -0400 (0:00:00.036) 0:00:09.006 ***********
ok: [managed-node1] => (item=quadlet_demo) => {
"ansible_facts": {
"certificate_test_certs": {
"quadlet_demo": {
"ca": "/etc/pki/tls/certs/quadlet_demo.crt",
"ca_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAIzFWaAIkkNhqhrxdsPQcy8wDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMIzhj\nYzU1OWEwLTA4OTI0MzYxLWFhMWFmMTc2LWMzZDA3MzJkMB4XDTI1MDcwODAwMTU0\nNloXDTI2MDcwODAwMTEwNlowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvMJ4XF1yIn/kccPW8bQlA0vF9wUQp+b0\nao+debjsYMy57s2syi/a3Ybksmj0q+GlR8BwUf27tAYluG3xocFRu6U9m5hTusLA\ne+/swnd0jTRR2zBop087elCOy/nII9hH2MhWozBwTFeg7qNiPp/vSKKFeW3WEFwc\nWS5zY45XSe0USHDfhzUtuipBReNAD5GmClqUnjxlLjfZzJcB57H1Z17BmJd3Ntwu\nLpJTi7xgr3kS7k0sOsIEEymNx1y+IADtWmKs+ivVwvJawS36RbOMcVKR2wNWGDao\nk7aPZK+B3RF4bSo4rBi5CVc7kOKA2cnJH1McEwyRIqKsIuwIPczIPQIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFP7i\nGG8+mJcx5nG3O2PAkto7imf4MB8GA1UdIwQYMBaAFP9kk4puiuM3aVL6VNR0K2db\nQgSqMA0GCSqGSIb3DQEBCwUAA4IBAQBTNIhRyqscD3YBlAH4Y/hwSNqfAxsNM34d\nSsMccfMBpEKz1RI+2TO5chxLoWFoH2WNVTz6kNRMHEAD9liaQvZx4rw6jMaRMUDl\nPJKWSpfh814mcUSxj22ON9vKWF/Lk5BlKV+q4Z7PKK15Nez3uUdTbujUubcCcRFt\n8Wgyky4DWvIXeDsloj6SgJwk5R3jNjSLXRA3rPMKBVFeLo9mGuAwKHv/z5s68yzq\n4iRx+pMU5Vk2OaYYiWiKXHbA//1UnkoHQCBqgWXeHWDgq2foKW8lUvg0gDhM3Va6\nu+WRfKQ19KPPRHGZEA6NegVYvkOUWeb3a61I7NKsy93B4qnC05u/\n-----END CERTIFICATE-----\n",
"cert": "/etc/pki/tls/certs/quadlet_demo.crt",
"cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAIzFWaAIkkNhqhrxdsPQcy8wDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMIzhj\nYzU1OWEwLTA4OTI0MzYxLWFhMWFmMTc2LWMzZDA3MzJkMB4XDTI1MDcwODAwMTU0\nNloXDTI2MDcwODAwMTEwNlowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvMJ4XF1yIn/kccPW8bQlA0vF9wUQp+b0\nao+debjsYMy57s2syi/a3Ybksmj0q+GlR8BwUf27tAYluG3xocFRu6U9m5hTusLA\ne+/swnd0jTRR2zBop087elCOy/nII9hH2MhWozBwTFeg7qNiPp/vSKKFeW3WEFwc\nWS5zY45XSe0USHDfhzUtuipBReNAD5GmClqUnjxlLjfZzJcB57H1Z17BmJd3Ntwu\nLpJTi7xgr3kS7k0sOsIEEymNx1y+IADtWmKs+ivVwvJawS36RbOMcVKR2wNWGDao\nk7aPZK+B3RF4bSo4rBi5CVc7kOKA2cnJH1McEwyRIqKsIuwIPczIPQIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFP7i\nGG8+mJcx5nG3O2PAkto7imf4MB8GA1UdIwQYMBaAFP9kk4puiuM3aVL6VNR0K2db\nQgSqMA0GCSqGSIb3DQEBCwUAA4IBAQBTNIhRyqscD3YBlAH4Y/hwSNqfAxsNM34d\nSsMccfMBpEKz1RI+2TO5chxLoWFoH2WNVTz6kNRMHEAD9liaQvZx4rw6jMaRMUDl\nPJKWSpfh814mcUSxj22ON9vKWF/Lk5BlKV+q4Z7PKK15Nez3uUdTbujUubcCcRFt\n8Wgyky4DWvIXeDsloj6SgJwk5R3jNjSLXRA3rPMKBVFeLo9mGuAwKHv/z5s68yzq\n4iRx+pMU5Vk2OaYYiWiKXHbA//1UnkoHQCBqgWXeHWDgq2foKW8lUvg0gDhM3Va6\nu+WRfKQ19KPPRHGZEA6NegVYvkOUWeb3a61I7NKsy93B4qnC05u/\n-----END CERTIFICATE-----\n",
"key": "/etc/pki/tls/private/quadlet_demo.key",
"key_content": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC8wnhcXXIif+Rx\nw9bxtCUDS8X3BRCn5vRqj515uOxgzLnuzazKL9rdhuSyaPSr4aVHwHBR/bu0BiW4\nbfGhwVG7pT2bmFO6wsB77+zCd3SNNFHbMGinTzt6UI7L+cgj2EfYyFajMHBMV6Du\no2I+n+9IooV5bdYQXBxZLnNjjldJ7RRIcN+HNS26KkFF40APkaYKWpSePGUuN9nM\nlwHnsfVnXsGYl3c23C4uklOLvGCveRLuTSw6wgQTKY3HXL4gAO1aYqz6K9XC8lrB\nLfpFs4xxUpHbA1YYNqiTto9kr4HdEXhtKjisGLkJVzuQ4oDZyckfUxwTDJEioqwi\n7Ag9zMg9AgMBAAECggEAF6eEJ0kDjky6H0KUO05grAjRGltNYskXvInaSWBx6NCH\nBWTLYDv+gK/QlYX/EVIR4Qu915yFKvJxRCss8mEUQcBbl9XPOlKBBhcDM7x+A9r4\nXhAzMTJF6RZSyr92w/0sxb8XUlR14i9CCN27KE9pEFBjixvHTV4JpGhkh+eUyzvd\nvgiY0X3Q9ogi2ynqHqh0L0uBgUda8QeIwLXUkdeWDkIxZn+t9yGhSsRqtne4/z4i\nlZXk/8EmGGN1AxIoJ7YsFjHc21bjdJNrm1XpG2tsJYuHiC3qozZ+gg2ZObWPli1f\nXe9qEg3DYinX4DXOcj0nfujxju6aqg3P+x3K0SIHYQKBgQD5EstRHx7npJRKTUOJ\nICTGYDc34f06asGYwof4a0mHfMr5iCOzSJ3JsFi6wxvdcIcnJywbZOJV7kKqahZg\nCj6JdlJLEUXsoC838lDQvv6nEJ9GrVe6dPO/O1RICRRP06gsT3gVQ/rQkvp4KM5w\n3vESAyneYeQ8rlZphV6v25/Z3QKBgQDCAkok3dDbdfCxCZKPMpy35e8QP5Eh2im6\nZ2fXqVYMpuOu+y7q3IMgiRzdkvHWywwEPYB6Sk0AdTpiBOxOrM3UFmApw7ATr3Tm\nFDIQ7gOrxFFKHLEHljpPJ/zO7Tx5F26IXioVlVXqpYhc0ivPVGfWbVcWN7/BflPx\nWtBvadcx4QKBgQDAeAB7AFRuZiRdQlVk2lzwmpsw6MY45RQTCofttGJOe2WR69zC\nhuO/XjFCqNCYjoMyAzg3Nv8brhEABRijQGdQ7Ajtjzx8fM/gKLTAikbYLKUYBY1c\nPVG5vYwEWlfC0+kJzKMbs7tWf61jOw1D9VNOOvmYqganyYRm6DPiKxHcVQKBgCq3\nkle3t2r69vokH28EN9mN+Stcs7n+bT2r0wN7N4UlofA/Qj9YOk8JoZ9v4Wvn3Ud5\nuzYx8e5t4WL/roBMMC3Il3farIRJF7YpMDTmE0zYtwVYd+oxu7j+Fv4A4lXPQPkp\nsp55+SbnN7UoWZkW/p4KCSjNMZ2LETSNSGOfrFBBAoGAVbBuKMXtc7ylMKlF+K/N\nophWhKGdqvntZmKbNItLjIXiPXBxFTAZqsR8mTzwcfkF+OstxDje3R93GU72Zfve\nQ1LzT2hQNHKkHdn2cocanxTjV6og0hIplgH4JK+cA3pdUniZfjLw8L0Hcw2dpy4L\neEgLvS1rxmzUVeG0R2heyms=\n-----END PRIVATE KEY-----\n"
}
}
},
"ansible_loop_var": "cert_name",
"cert_name": "quadlet_demo",
"changed": false
}
TASK [fedora.linux_system_roles.certificate : Stop tracking certificates] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:169
Monday 07 July 2025 20:15:47 -0400 (0:00:00.057) 0:00:09.063 ***********
ok: [managed-node1] => (item={'cert': '/etc/pki/tls/certs/quadlet_demo.crt', 'key': '/etc/pki/tls/private/quadlet_demo.key', 'ca': '/etc/pki/tls/certs/quadlet_demo.crt', 'cert_content': '-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAIzFWaAIkkNhqhrxdsPQcy8wDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMIzhj\nYzU1OWEwLTA4OTI0MzYxLWFhMWFmMTc2LWMzZDA3MzJkMB4XDTI1MDcwODAwMTU0\nNloXDTI2MDcwODAwMTEwNlowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvMJ4XF1yIn/kccPW8bQlA0vF9wUQp+b0\nao+debjsYMy57s2syi/a3Ybksmj0q+GlR8BwUf27tAYluG3xocFRu6U9m5hTusLA\ne+/swnd0jTRR2zBop087elCOy/nII9hH2MhWozBwTFeg7qNiPp/vSKKFeW3WEFwc\nWS5zY45XSe0USHDfhzUtuipBReNAD5GmClqUnjxlLjfZzJcB57H1Z17BmJd3Ntwu\nLpJTi7xgr3kS7k0sOsIEEymNx1y+IADtWmKs+ivVwvJawS36RbOMcVKR2wNWGDao\nk7aPZK+B3RF4bSo4rBi5CVc7kOKA2cnJH1McEwyRIqKsIuwIPczIPQIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFP7i\nGG8+mJcx5nG3O2PAkto7imf4MB8GA1UdIwQYMBaAFP9kk4puiuM3aVL6VNR0K2db\nQgSqMA0GCSqGSIb3DQEBCwUAA4IBAQBTNIhRyqscD3YBlAH4Y/hwSNqfAxsNM34d\nSsMccfMBpEKz1RI+2TO5chxLoWFoH2WNVTz6kNRMHEAD9liaQvZx4rw6jMaRMUDl\nPJKWSpfh814mcUSxj22ON9vKWF/Lk5BlKV+q4Z7PKK15Nez3uUdTbujUubcCcRFt\n8Wgyky4DWvIXeDsloj6SgJwk5R3jNjSLXRA3rPMKBVFeLo9mGuAwKHv/z5s68yzq\n4iRx+pMU5Vk2OaYYiWiKXHbA//1UnkoHQCBqgWXeHWDgq2foKW8lUvg0gDhM3Va6\nu+WRfKQ19KPPRHGZEA6NegVYvkOUWeb3a61I7NKsy93B4qnC05u/\n-----END CERTIFICATE-----\n', 'key_content': '-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC8wnhcXXIif+Rx\nw9bxtCUDS8X3BRCn5vRqj515uOxgzLnuzazKL9rdhuSyaPSr4aVHwHBR/bu0BiW4\nbfGhwVG7pT2bmFO6wsB77+zCd3SNNFHbMGinTzt6UI7L+cgj2EfYyFajMHBMV6Du\no2I+n+9IooV5bdYQXBxZLnNjjldJ7RRIcN+HNS26KkFF40APkaYKWpSePGUuN9nM\nlwHnsfVnXsGYl3c23C4uklOLvGCveRLuTSw6wgQTKY3HXL4gAO1aYqz6K9XC8lrB\nLfpFs4xxUpHbA1YYNqiTto9kr4HdEXhtKjisGLkJVzuQ4oDZyckfUxwTDJEioqwi\n7Ag9zMg9AgMBAAECggEAF6eEJ0kDjky6H0KUO05grAjRGltNYskXvInaSWBx6NCH\nBWTLYDv+gK/QlYX/EVIR4Qu915yFKvJxRCss8mEUQcBbl9XPOlKBBhcDM7x+A9r4\nXhAzMTJF6RZSyr92w/0sxb8XUlR14i9CCN27KE9pEFBjixvHTV4JpGhkh+eUyzvd\nvgiY0X3Q9ogi2ynqHqh0L0uBgUda8QeIwLXUkdeWDkIxZn+t9yGhSsRqtne4/z4i\nlZXk/8EmGGN1AxIoJ7YsFjHc21bjdJNrm1XpG2tsJYuHiC3qozZ+gg2ZObWPli1f\nXe9qEg3DYinX4DXOcj0nfujxju6aqg3P+x3K0SIHYQKBgQD5EstRHx7npJRKTUOJ\nICTGYDc34f06asGYwof4a0mHfMr5iCOzSJ3JsFi6wxvdcIcnJywbZOJV7kKqahZg\nCj6JdlJLEUXsoC838lDQvv6nEJ9GrVe6dPO/O1RICRRP06gsT3gVQ/rQkvp4KM5w\n3vESAyneYeQ8rlZphV6v25/Z3QKBgQDCAkok3dDbdfCxCZKPMpy35e8QP5Eh2im6\nZ2fXqVYMpuOu+y7q3IMgiRzdkvHWywwEPYB6Sk0AdTpiBOxOrM3UFmApw7ATr3Tm\nFDIQ7gOrxFFKHLEHljpPJ/zO7Tx5F26IXioVlVXqpYhc0ivPVGfWbVcWN7/BflPx\nWtBvadcx4QKBgQDAeAB7AFRuZiRdQlVk2lzwmpsw6MY45RQTCofttGJOe2WR69zC\nhuO/XjFCqNCYjoMyAzg3Nv8brhEABRijQGdQ7Ajtjzx8fM/gKLTAikbYLKUYBY1c\nPVG5vYwEWlfC0+kJzKMbs7tWf61jOw1D9VNOOvmYqganyYRm6DPiKxHcVQKBgCq3\nkle3t2r69vokH28EN9mN+Stcs7n+bT2r0wN7N4UlofA/Qj9YOk8JoZ9v4Wvn3Ud5\nuzYx8e5t4WL/roBMMC3Il3farIRJF7YpMDTmE0zYtwVYd+oxu7j+Fv4A4lXPQPkp\nsp55+SbnN7UoWZkW/p4KCSjNMZ2LETSNSGOfrFBBAoGAVbBuKMXtc7ylMKlF+K/N\nophWhKGdqvntZmKbNItLjIXiPXBxFTAZqsR8mTzwcfkF+OstxDje3R93GU72Zfve\nQ1LzT2hQNHKkHdn2cocanxTjV6og0hIplgH4JK+cA3pdUniZfjLw8L0Hcw2dpy4L\neEgLvS1rxmzUVeG0R2heyms=\n-----END PRIVATE KEY-----\n', 'ca_content': '-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAIzFWaAIkkNhqhrxdsPQcy8wDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMIzhj\nYzU1OWEwLTA4OTI0MzYxLWFhMWFmMTc2LWMzZDA3MzJkMB4XDTI1MDcwODAwMTU0\nNloXDTI2MDcwODAwMTEwNlowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvMJ4XF1yIn/kccPW8bQlA0vF9wUQp+b0\nao+debjsYMy57s2syi/a3Ybksmj0q+GlR8BwUf27tAYluG3xocFRu6U9m5hTusLA\ne+/swnd0jTRR2zBop087elCOy/nII9hH2MhWozBwTFeg7qNiPp/vSKKFeW3WEFwc\nWS5zY45XSe0USHDfhzUtuipBReNAD5GmClqUnjxlLjfZzJcB57H1Z17BmJd3Ntwu\nLpJTi7xgr3kS7k0sOsIEEymNx1y+IADtWmKs+ivVwvJawS36RbOMcVKR2wNWGDao\nk7aPZK+B3RF4bSo4rBi5CVc7kOKA2cnJH1McEwyRIqKsIuwIPczIPQIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFP7i\nGG8+mJcx5nG3O2PAkto7imf4MB8GA1UdIwQYMBaAFP9kk4puiuM3aVL6VNR0K2db\nQgSqMA0GCSqGSIb3DQEBCwUAA4IBAQBTNIhRyqscD3YBlAH4Y/hwSNqfAxsNM34d\nSsMccfMBpEKz1RI+2TO5chxLoWFoH2WNVTz6kNRMHEAD9liaQvZx4rw6jMaRMUDl\nPJKWSpfh814mcUSxj22ON9vKWF/Lk5BlKV+q4Z7PKK15Nez3uUdTbujUubcCcRFt\n8Wgyky4DWvIXeDsloj6SgJwk5R3jNjSLXRA3rPMKBVFeLo9mGuAwKHv/z5s68yzq\n4iRx+pMU5Vk2OaYYiWiKXHbA//1UnkoHQCBqgWXeHWDgq2foKW8lUvg0gDhM3Va6\nu+WRfKQ19KPPRHGZEA6NegVYvkOUWeb3a61I7NKsy93B4qnC05u/\n-----END CERTIFICATE-----\n'}) => {
"ansible_loop_var": "item",
"changed": false,
"cmd": [
"getcert",
"stop-tracking",
"-f",
"/etc/pki/tls/certs/quadlet_demo.crt"
],
"delta": "0:00:00.027722",
"end": "2025-07-07 20:15:48.123360",
"item": {
"ca": "/etc/pki/tls/certs/quadlet_demo.crt",
"ca_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAIzFWaAIkkNhqhrxdsPQcy8wDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMIzhj\nYzU1OWEwLTA4OTI0MzYxLWFhMWFmMTc2LWMzZDA3MzJkMB4XDTI1MDcwODAwMTU0\nNloXDTI2MDcwODAwMTEwNlowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvMJ4XF1yIn/kccPW8bQlA0vF9wUQp+b0\nao+debjsYMy57s2syi/a3Ybksmj0q+GlR8BwUf27tAYluG3xocFRu6U9m5hTusLA\ne+/swnd0jTRR2zBop087elCOy/nII9hH2MhWozBwTFeg7qNiPp/vSKKFeW3WEFwc\nWS5zY45XSe0USHDfhzUtuipBReNAD5GmClqUnjxlLjfZzJcB57H1Z17BmJd3Ntwu\nLpJTi7xgr3kS7k0sOsIEEymNx1y+IADtWmKs+ivVwvJawS36RbOMcVKR2wNWGDao\nk7aPZK+B3RF4bSo4rBi5CVc7kOKA2cnJH1McEwyRIqKsIuwIPczIPQIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFP7i\nGG8+mJcx5nG3O2PAkto7imf4MB8GA1UdIwQYMBaAFP9kk4puiuM3aVL6VNR0K2db\nQgSqMA0GCSqGSIb3DQEBCwUAA4IBAQBTNIhRyqscD3YBlAH4Y/hwSNqfAxsNM34d\nSsMccfMBpEKz1RI+2TO5chxLoWFoH2WNVTz6kNRMHEAD9liaQvZx4rw6jMaRMUDl\nPJKWSpfh814mcUSxj22ON9vKWF/Lk5BlKV+q4Z7PKK15Nez3uUdTbujUubcCcRFt\n8Wgyky4DWvIXeDsloj6SgJwk5R3jNjSLXRA3rPMKBVFeLo9mGuAwKHv/z5s68yzq\n4iRx+pMU5Vk2OaYYiWiKXHbA//1UnkoHQCBqgWXeHWDgq2foKW8lUvg0gDhM3Va6\nu+WRfKQ19KPPRHGZEA6NegVYvkOUWeb3a61I7NKsy93B4qnC05u/\n-----END CERTIFICATE-----\n",
"cert": "/etc/pki/tls/certs/quadlet_demo.crt",
"cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAIzFWaAIkkNhqhrxdsPQcy8wDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMIzhj\nYzU1OWEwLTA4OTI0MzYxLWFhMWFmMTc2LWMzZDA3MzJkMB4XDTI1MDcwODAwMTU0\nNloXDTI2MDcwODAwMTEwNlowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvMJ4XF1yIn/kccPW8bQlA0vF9wUQp+b0\nao+debjsYMy57s2syi/a3Ybksmj0q+GlR8BwUf27tAYluG3xocFRu6U9m5hTusLA\ne+/swnd0jTRR2zBop087elCOy/nII9hH2MhWozBwTFeg7qNiPp/vSKKFeW3WEFwc\nWS5zY45XSe0USHDfhzUtuipBReNAD5GmClqUnjxlLjfZzJcB57H1Z17BmJd3Ntwu\nLpJTi7xgr3kS7k0sOsIEEymNx1y+IADtWmKs+ivVwvJawS36RbOMcVKR2wNWGDao\nk7aPZK+B3RF4bSo4rBi5CVc7kOKA2cnJH1McEwyRIqKsIuwIPczIPQIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFP7i\nGG8+mJcx5nG3O2PAkto7imf4MB8GA1UdIwQYMBaAFP9kk4puiuM3aVL6VNR0K2db\nQgSqMA0GCSqGSIb3DQEBCwUAA4IBAQBTNIhRyqscD3YBlAH4Y/hwSNqfAxsNM34d\nSsMccfMBpEKz1RI+2TO5chxLoWFoH2WNVTz6kNRMHEAD9liaQvZx4rw6jMaRMUDl\nPJKWSpfh814mcUSxj22ON9vKWF/Lk5BlKV+q4Z7PKK15Nez3uUdTbujUubcCcRFt\n8Wgyky4DWvIXeDsloj6SgJwk5R3jNjSLXRA3rPMKBVFeLo9mGuAwKHv/z5s68yzq\n4iRx+pMU5Vk2OaYYiWiKXHbA//1UnkoHQCBqgWXeHWDgq2foKW8lUvg0gDhM3Va6\nu+WRfKQ19KPPRHGZEA6NegVYvkOUWeb3a61I7NKsy93B4qnC05u/\n-----END CERTIFICATE-----\n",
"key": "/etc/pki/tls/private/quadlet_demo.key",
"key_content": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC8wnhcXXIif+Rx\nw9bxtCUDS8X3BRCn5vRqj515uOxgzLnuzazKL9rdhuSyaPSr4aVHwHBR/bu0BiW4\nbfGhwVG7pT2bmFO6wsB77+zCd3SNNFHbMGinTzt6UI7L+cgj2EfYyFajMHBMV6Du\no2I+n+9IooV5bdYQXBxZLnNjjldJ7RRIcN+HNS26KkFF40APkaYKWpSePGUuN9nM\nlwHnsfVnXsGYl3c23C4uklOLvGCveRLuTSw6wgQTKY3HXL4gAO1aYqz6K9XC8lrB\nLfpFs4xxUpHbA1YYNqiTto9kr4HdEXhtKjisGLkJVzuQ4oDZyckfUxwTDJEioqwi\n7Ag9zMg9AgMBAAECggEAF6eEJ0kDjky6H0KUO05grAjRGltNYskXvInaSWBx6NCH\nBWTLYDv+gK/QlYX/EVIR4Qu915yFKvJxRCss8mEUQcBbl9XPOlKBBhcDM7x+A9r4\nXhAzMTJF6RZSyr92w/0sxb8XUlR14i9CCN27KE9pEFBjixvHTV4JpGhkh+eUyzvd\nvgiY0X3Q9ogi2ynqHqh0L0uBgUda8QeIwLXUkdeWDkIxZn+t9yGhSsRqtne4/z4i\nlZXk/8EmGGN1AxIoJ7YsFjHc21bjdJNrm1XpG2tsJYuHiC3qozZ+gg2ZObWPli1f\nXe9qEg3DYinX4DXOcj0nfujxju6aqg3P+x3K0SIHYQKBgQD5EstRHx7npJRKTUOJ\nICTGYDc34f06asGYwof4a0mHfMr5iCOzSJ3JsFi6wxvdcIcnJywbZOJV7kKqahZg\nCj6JdlJLEUXsoC838lDQvv6nEJ9GrVe6dPO/O1RICRRP06gsT3gVQ/rQkvp4KM5w\n3vESAyneYeQ8rlZphV6v25/Z3QKBgQDCAkok3dDbdfCxCZKPMpy35e8QP5Eh2im6\nZ2fXqVYMpuOu+y7q3IMgiRzdkvHWywwEPYB6Sk0AdTpiBOxOrM3UFmApw7ATr3Tm\nFDIQ7gOrxFFKHLEHljpPJ/zO7Tx5F26IXioVlVXqpYhc0ivPVGfWbVcWN7/BflPx\nWtBvadcx4QKBgQDAeAB7AFRuZiRdQlVk2lzwmpsw6MY45RQTCofttGJOe2WR69zC\nhuO/XjFCqNCYjoMyAzg3Nv8brhEABRijQGdQ7Ajtjzx8fM/gKLTAikbYLKUYBY1c\nPVG5vYwEWlfC0+kJzKMbs7tWf61jOw1D9VNOOvmYqganyYRm6DPiKxHcVQKBgCq3\nkle3t2r69vokH28EN9mN+Stcs7n+bT2r0wN7N4UlofA/Qj9YOk8JoZ9v4Wvn3Ud5\nuzYx8e5t4WL/roBMMC3Il3farIRJF7YpMDTmE0zYtwVYd+oxu7j+Fv4A4lXPQPkp\nsp55+SbnN7UoWZkW/p4KCSjNMZ2LETSNSGOfrFBBAoGAVbBuKMXtc7ylMKlF+K/N\nophWhKGdqvntZmKbNItLjIXiPXBxFTAZqsR8mTzwcfkF+OstxDje3R93GU72Zfve\nQ1LzT2hQNHKkHdn2cocanxTjV6og0hIplgH4JK+cA3pdUniZfjLw8L0Hcw2dpy4L\neEgLvS1rxmzUVeG0R2heyms=\n-----END PRIVATE KEY-----\n"
},
"rc": 0,
"start": "2025-07-07 20:15:48.095638"
}
STDOUT:
Request "20250708001546" removed.
TASK [fedora.linux_system_roles.certificate : Remove files] ********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:174
Monday 07 July 2025 20:15:48 -0400 (0:00:00.391) 0:00:09.454 ***********
changed: [managed-node1] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => {
"ansible_loop_var": "item",
"changed": true,
"item": "/etc/pki/tls/certs/quadlet_demo.crt",
"path": "/etc/pki/tls/certs/quadlet_demo.crt",
"state": "absent"
}
changed: [managed-node1] => (item=/etc/pki/tls/private/quadlet_demo.key) => {
"ansible_loop_var": "item",
"changed": true,
"item": "/etc/pki/tls/private/quadlet_demo.key",
"path": "/etc/pki/tls/private/quadlet_demo.key",
"state": "absent"
}
ok: [managed-node1] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => {
"ansible_loop_var": "item",
"changed": false,
"item": "/etc/pki/tls/certs/quadlet_demo.crt",
"path": "/etc/pki/tls/certs/quadlet_demo.crt",
"state": "absent"
}
TASK [Run the role] ************************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:62
Monday 07 July 2025 20:15:49 -0400 (0:00:01.050) 0:00:10.505 ***********
included: fedora.linux_system_roles.podman for managed-node1
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Monday 07 July 2025 20:15:49 -0400 (0:00:00.063) 0:00:10.569 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Monday 07 July 2025 20:15:49 -0400 (0:00:00.024) 0:00:10.593 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Monday 07 July 2025 20:15:49 -0400 (0:00:00.037) 0:00:10.630 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Monday 07 July 2025 20:15:49 -0400 (0:00:00.344) 0:00:10.974 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_is_ostree": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Monday 07 July 2025 20:15:49 -0400 (0:00:00.023) 0:00:10.998 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Monday 07 July 2025 20:15:50 -0400 (0:00:00.342) 0:00:11.340 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_is_transactional": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Monday 07 July 2025 20:15:50 -0400 (0:00:00.023) 0:00:11.364 ***********
ok: [managed-node1] => (item=RedHat.yml) => {
"ansible_facts": {
"__podman_packages": [
"podman",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "RedHat.yml"
}
skipping: [managed-node1] => (item=CentOS.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "CentOS.yml",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=CentOS_9.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "CentOS_9.yml",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=CentOS_9.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "CentOS_9.yml",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Monday 07 July 2025 20:15:50 -0400 (0:00:00.038) 0:00:11.403 ***********
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Monday 07 July 2025 20:15:51 -0400 (0:00:00.994) 0:00:12.398 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_use_copr | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Monday 07 July 2025 20:15:51 -0400 (0:00:00.048) 0:00:12.446 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Monday 07 July 2025 20:15:51 -0400 (0:00:00.054) 0:00:12.500 ***********
skipping: [managed-node1] => {
"false_condition": "__podman_is_transactional | d(false)"
}
TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Monday 07 July 2025 20:15:51 -0400 (0:00:00.047) 0:00:12.547 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Monday 07 July 2025 20:15:51 -0400 (0:00:00.049) 0:00:12.596 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Monday 07 July 2025 20:15:51 -0400 (0:00:00.046) 0:00:12.643 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"--version"
],
"delta": "0:00:00.026319",
"end": "2025-07-07 20:15:51.685464",
"rc": 0,
"start": "2025-07-07 20:15:51.659145"
}
STDOUT:
podman version 5.5.1
TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Monday 07 July 2025 20:15:51 -0400 (0:00:00.385) 0:00:13.029 ***********
ok: [managed-node1] => {
"ansible_facts": {
"podman_version": "5.5.1"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Monday 07 July 2025 20:15:51 -0400 (0:00:00.032) 0:00:13.061 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_version is version(\"4.2\", \"<\")",
"skip_reason": "Conditional result was False"
}
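Note: this and the following version gates compare the podman_version fact set above using Jinja's version test; a minimal sketch of such a check (the fail module and message wording are assumed, the condition is the one shown in the task result):
    - name: Podman package version must be 4.2 or later
      ansible.builtin.fail:
        msg: "podman {{ podman_version }} is too old; 4.2 or later is required"
      when: podman_version is version("4.2", "<")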
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Monday 07 July 2025 20:15:51 -0400 (0:00:00.033) 0:00:13.095 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_version is version(\"4.4\", \"<\")",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Monday 07 July 2025 20:15:51 -0400 (0:00:00.086) 0:00:13.181 ***********
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}
MSG:
end_host conditional evaluated to false, continuing execution for managed-node1
TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Monday 07 July 2025 20:15:52 -0400 (0:00:00.112) 0:00:13.294 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Monday 07 July 2025 20:15:52 -0400 (0:00:00.044) 0:00:13.338 ***********
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}
MSG:
end_host conditional evaluated to false, continuing execution for managed-node1
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Monday 07 July 2025 20:15:52 -0400 (0:00:00.047) 0:00:13.386 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:15:52 -0400 (0:00:00.060) 0:00:13.446 ***********
ok: [managed-node1] => {
"ansible_facts": {
"getent_passwd": {
"root": [
"x",
"0",
"0",
"root",
"/root",
"/bin/bash"
]
}
},
"changed": false
}
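Note: the getent_passwd fact above is produced by the getent module; a minimal sketch of that lookup, keyed on root as resolved for this run (the database/key options shown are the module's standard ones):
    - name: Get user information
      ansible.builtin.getent:
        database: passwd
        key: root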
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:15:52 -0400 (0:00:00.452) 0:00:13.899 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:15:52 -0400 (0:00:00.035) 0:00:13.934 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:15:52 -0400 (0:00:00.042) 0:00:13.977 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1751933454.6873221,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "8bedde2dbca15219e1a3b95a68a8c0d26a92ba62",
"ctime": 1751933427.4892416,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 665568,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1748273472.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15496,
"uid": 0,
"version": "4278445899",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:15:53 -0400 (0:00:00.365) 0:00:14.343 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:15:53 -0400 (0:00:00.033) 0:00:14.376 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:15:53 -0400 (0:00:00.034) 0:00:14.410 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:15:53 -0400 (0:00:00.033) 0:00:14.444 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:15:53 -0400 (0:00:00.034) 0:00:14.478 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:15:53 -0400 (0:00:00.033) 0:00:14.511 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:15:53 -0400 (0:00:00.034) 0:00:14.546 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:15:53 -0400 (0:00:00.033) 0:00:14.579 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Monday 07 July 2025 20:15:53 -0400 (0:00:00.032) 0:00:14.612 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf",
"__podman_parent_mode": "0755",
"__podman_parent_path": "/etc/containers",
"__podman_policy_json_file": "/etc/containers/policy.json",
"__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf",
"__podman_storage_conf_file": "/etc/containers/storage.conf"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126
Monday 07 July 2025 20:15:53 -0400 (0:00:00.100) 0:00:14.712 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5
Monday 07 July 2025 20:15:53 -0400 (0:00:00.060) 0:00:14.772 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_containers_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update container config file] *********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13
Monday 07 July 2025 20:15:53 -0400 (0:00:00.041) 0:00:14.814 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_containers_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] *************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129
Monday 07 July 2025 20:15:53 -0400 (0:00:00.032) 0:00:14.846 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5
Monday 07 July 2025 20:15:53 -0400 (0:00:00.058) 0:00:14.905 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_registries_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update registries config file] ********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13
Monday 07 July 2025 20:15:53 -0400 (0:00:00.031) 0:00:14.937 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_registries_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle storage.conf] ******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132
Monday 07 July 2025 20:15:53 -0400 (0:00:00.030) 0:00:14.968 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7
Monday 07 July 2025 20:15:53 -0400 (0:00:00.058) 0:00:15.027 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_storage_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15
Monday 07 July 2025 20:15:53 -0400 (0:00:00.029) 0:00:15.056 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_storage_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135
Monday 07 July 2025 20:15:53 -0400 (0:00:00.031) 0:00:15.088 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8
Monday 07 July 2025 20:15:53 -0400 (0:00:00.060) 0:00:15.148 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16
Monday 07 July 2025 20:15:53 -0400 (0:00:00.030) 0:00:15.179 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21
Monday 07 July 2025 20:15:53 -0400 (0:00:00.030) 0:00:15.210 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27
Monday 07 July 2025 20:15:53 -0400 (0:00:00.030) 0:00:15.240 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
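All of the config handlers above skipped because the corresponding role variables are empty in this test run. A hypothetical set of inputs that would exercise these paths is sketched below; the variable names come from the conditions in this log, but the value schemas are illustrative only (consult the role documentation for the exact format), and they would be written to the paths set in "Set config file paths" above.
  podman_containers_conf:        # -> /etc/containers/containers.conf.d/50-systemroles.conf
    containers:
      log_driver: journald
  podman_registries_conf:        # -> /etc/containers/registries.conf.d/50-systemroles.conf
    unqualified-search-registries:
      - registry.fedoraproject.org
  podman_storage_conf:           # -> /etc/containers/storage.conf
    storage:
      runroot: /run/containers/storage
  podman_policy_json:            # -> /etc/containers/policy.json
    default:
      - type: insecureAcceptAnything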
TASK [Manage firewall for specified ports] *************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141
Monday 07 July 2025 20:15:53 -0400 (0:00:00.030) 0:00:15.270 ***********
included: fedora.linux_system_roles.firewall for managed-node1
TASK [fedora.linux_system_roles.firewall : Setup firewalld] ********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2
Monday 07 July 2025 20:15:54 -0400 (0:00:00.132) 0:00:15.403 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node1
TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2
Monday 07 July 2025 20:15:54 -0400 (0:00:00.058) 0:00:15.461 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Check if system is ostree] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10
Monday 07 July 2025 20:15:54 -0400 (0:00:00.037) 0:00:15.499 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15
Monday 07 July 2025 20:15:54 -0400 (0:00:00.357) 0:00:15.856 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__firewall_is_ostree": false
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22
Monday 07 July 2025 20:15:54 -0400 (0:00:00.036) 0:00:15.893 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27
Monday 07 July 2025 20:15:54 -0400 (0:00:00.353) 0:00:16.246 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__firewall_is_transactional": false
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Run systemctl] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:34
Monday 07 July 2025 20:15:55 -0400 (0:00:00.035) 0:00:16.282 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"systemctl",
"is-system-running"
],
"delta": "0:00:00.008494",
"end": "2025-07-07 20:15:55.306872",
"failed_when_result": false,
"rc": 0,
"start": "2025-07-07 20:15:55.298378"
}
STDOUT:
running
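Rough equivalent of the probe above (a sketch; the role's actual failed_when condition is more specific): check systemd's overall state without failing the play when it is degraded or unavailable. The register name matches the one referenced by the next task's condition.
  - name: Run systemctl
    ansible.builtin.command: systemctl is-system-running
    register: __is_system_running
    changed_when: false
    failed_when: false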
TASK [fedora.linux_system_roles.firewall : Require installed systemd] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:41
Monday 07 July 2025 20:15:55 -0400 (0:00:00.364) 0:00:16.647 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "\"No such file or directory\" in __is_system_running.msg | d(\"\")",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Set flag to indicate that systemd runtime operations are available] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:46
Monday 07 July 2025 20:15:55 -0400 (0:00:00.034) 0:00:16.681 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__firewall_is_booted": true
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Install firewalld] ******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:51
Monday 07 July 2025 20:15:55 -0400 (0:00:00.034) 0:00:16.715 ***********
ok: [managed-node1] => {
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:63
Monday 07 July 2025 20:15:56 -0400 (0:00:01.305) 0:00:18.021 ***********
skipping: [managed-node1] => {
"false_condition": "__firewall_is_transactional | d(false)"
}
TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:68
Monday 07 July 2025 20:15:56 -0400 (0:00:00.031) 0:00:18.052 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:73
Monday 07 July 2025 20:15:56 -0400 (0:00:00.030) 0:00:18.083 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Check which conflicting services are enabled] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5
Monday 07 July 2025 20:15:56 -0400 (0:00:00.030) 0:00:18.114 ***********
skipping: [managed-node1] => (item=nftables) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "nftables",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=iptables) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "iptables",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=ufw) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "ufw",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:14
Monday 07 July 2025 20:15:56 -0400 (0:00:00.035) 0:00:18.149 ***********
skipping: [managed-node1] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'nftables', 'ansible_loop_var': 'item'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "nftables",
"skip_reason": "Conditional result was False",
"skipped": true
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'iptables', 'ansible_loop_var': 'item'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "iptables",
"skip_reason": "Conditional result was False",
"skipped": true
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'ufw', 'ansible_loop_var': 'item'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "ufw",
"skip_reason": "Conditional result was False",
"skipped": true
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:24
Monday 07 July 2025 20:15:56 -0400 (0:00:00.038) 0:00:18.188 ***********
ok: [managed-node1] => {
"changed": false,
"name": "firewalld",
"status": {
"AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0",
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "dbus.socket polkit.service basic.target system.slice dbus-broker.service sysinit.target",
"AllowIsolate": "no",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target network-pre.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "iptables.service ip6tables.service ipset.service shutdown.target ebtables.service",
"ControlGroupId": "0",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DevicePolicy": "auto",
"Documentation": "\"man:firewalld(1)\"",
"DynamicUser": "no",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FinalKillSignal": "9",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "18446744073709551615",
"IOReadOperations": "18446744073709551615",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "18446744073709551615",
"IOWriteOperations": "18446744073709551615",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13688",
"LimitNPROCSoft": "13688",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13688",
"LimitSIGPENDINGSoft": "13688",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "infinity",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "firewalld.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "dbus.socket system.slice sysinit.target",
"Restart": "no",
"RestartKillSignal": "15",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "21900",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "disabled",
"UtmpMode": "init",
"Wants": "network-pre.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "infinity"
}
}
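Sketch of the unmask step that produced the result above (the role's actual task may set additional parameters):
  - name: Unmask firewalld service
    ansible.builtin.systemd:
      name: firewalld
      masked: false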
TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:30
Monday 07 July 2025 20:15:57 -0400 (0:00:00.514) 0:00:18.703 ***********
changed: [managed-node1] => {
"changed": true,
"enabled": true,
"name": "firewalld",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0",
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "sysinit.target dbus-broker.service polkit.service dbus.socket basic.target system.slice",
"AllowIsolate": "no",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target network-pre.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "ebtables.service iptables.service shutdown.target ip6tables.service ipset.service",
"ControlGroupId": "0",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DevicePolicy": "auto",
"Documentation": "\"man:firewalld(1)\"",
"DynamicUser": "no",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FinalKillSignal": "9",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "18446744073709551615",
"IOReadOperations": "18446744073709551615",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "18446744073709551615",
"IOWriteOperations": "18446744073709551615",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13688",
"LimitNPROCSoft": "13688",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13688",
"LimitSIGPENDINGSoft": "13688",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "infinity",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "firewalld.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "dbus.socket system.slice sysinit.target",
"Restart": "no",
"RestartKillSignal": "15",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "21900",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "disabled",
"UtmpMode": "init",
"Wants": "network-pre.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "infinity"
}
}
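Sketch matching the reported result above (changed, enabled: true, state: started):
  - name: Enable and start firewalld service
    ansible.builtin.systemd:
      name: firewalld
      enabled: true
      state: started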
TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:36
Monday 07 July 2025 20:15:58 -0400 (0:00:01.010) 0:00:19.713 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__firewall_previous_replaced": false,
"__firewall_python_cmd": "/usr/bin/python3.9",
"__firewall_report_changed": true
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:45
Monday 07 July 2025 20:15:58 -0400 (0:00:00.049) 0:00:19.762 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:58
Monday 07 July 2025 20:15:58 -0400 (0:00:00.040) 0:00:19.803 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Configure firewall] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:74
Monday 07 July 2025 20:15:58 -0400 (0:00:00.034) 0:00:19.837 ***********
changed: [managed-node1] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
"__firewall_changed": true,
"ansible_loop_var": "item",
"changed": true,
"item": {
"port": "8000/tcp",
"state": "enabled"
}
}
changed: [managed-node1] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
"__firewall_changed": true,
"ansible_loop_var": "item",
"changed": true,
"item": {
"port": "9000/tcp",
"state": "enabled"
}
}
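The two loop items above correspond to a port list handed to the firewall role; in playbook terms the input looks roughly like this (shown as the firewall role sees it; how the podman role derives it from its own settings is not visible in this log):
  firewall:
    - port: 8000/tcp
      state: enabled
    - port: 9000/tcp
      state: enabled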
TASK [fedora.linux_system_roles.firewall : Gather firewall config information] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:126
Monday 07 July 2025 20:15:59 -0400 (0:00:01.275) 0:00:21.113 ***********
skipping: [managed-node1] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall | length == 1",
"item": {
"port": "8000/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall | length == 1",
"item": {
"port": "9000/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:137
Monday 07 July 2025 20:15:59 -0400 (0:00:00.079) 0:00:21.192 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall | length == 1",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:146
Monday 07 July 2025 20:15:59 -0400 (0:00:00.043) 0:00:21.236 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall == None or firewall | length == 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:152
Monday 07 July 2025 20:15:59 -0400 (0:00:00.039) 0:00:21.275 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall == None or firewall | length == 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:161
Monday 07 July 2025 20:16:00 -0400 (0:00:00.041) 0:00:21.317 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Calculate what has changed] *********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:172
Monday 07 July 2025 20:16:00 -0400 (0:00:00.038) 0:00:21.355 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Show diffs] *************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:178
Monday 07 July 2025 20:16:00 -0400 (0:00:00.034) 0:00:21.389 ***********
skipping: [managed-node1] => {
"false_condition": "__firewall_previous_replaced | bool"
}
TASK [Manage selinux for specified ports] **************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148
Monday 07 July 2025 20:16:00 -0400 (0:00:00.048) 0:00:21.437 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_selinux_ports | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155
Monday 07 July 2025 20:16:00 -0400 (0:00:00.033) 0:00:21.471 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_cancel_user_linger": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159
Monday 07 July 2025 20:16:00 -0400 (0:00:00.032) 0:00:21.503 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168
Monday 07 July 2025 20:16:00 -0400 (0:00:00.034) 0:00:21.537 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177
Monday 07 July 2025 20:16:00 -0400 (0:00:00.033) 0:00:21.570 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Monday 07 July 2025 20:16:00 -0400 (0:00:00.214) 0:00:21.785 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Monday 07 July 2025 20:16:00 -0400 (0:00:00.058) 0:00:21.844 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:16:00 -0400 (0:00:00.116) 0:00:21.960 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:16:00 -0400 (0:00:00.043) 0:00:22.004 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:16:00 -0400 (0:00:00.039) 0:00:22.044 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:16:00 -0400 (0:00:00.043) 0:00:22.087 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:16:00 -0400 (0:00:00.032) 0:00:22.120 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:16:00 -0400 (0:00:00.032) 0:00:22.152 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:16:00 -0400 (0:00:00.034) 0:00:22.187 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:16:00 -0400 (0:00:00.038) 0:00:22.225 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:16:01 -0400 (0:00:00.053) 0:00:22.279 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:16:01 -0400 (0:00:00.046) 0:00:22.326 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:16:01 -0400 (0:00:00.036) 0:00:22.362 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:16:01 -0400 (0:00:00.053) 0:00:22.416 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14
Monday 07 July 2025 20:16:01 -0400 (0:00:00.054) 0:00:22.471 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20
Monday 07 July 2025 20:16:01 -0400 (0:00:00.065) 0:00:22.537 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:16:01 -0400 (0:00:00.068) 0:00:22.605 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:16:01 -0400 (0:00:00.035) 0:00:22.640 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:16:01 -0400 (0:00:00.030) 0:00:22.671 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
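Illustrative only: the linger tasks above are skipped because __podman_rootless is false. For a rootless user the role enables session lingering, conceptually like this sketch ("someuser" is a placeholder):
  - name: Enable linger for a hypothetical rootless user
    ansible.builtin.command: loginctl enable-linger someuser
    args:
      creates: /var/lib/systemd/linger/someuser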
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25
Monday 07 July 2025 20:16:01 -0400 (0:00:00.069) 0:00:22.740 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
Monday 07 July 2025 20:16:01 -0400 (0:00:00.030) 0:00:22.771 ***********
[WARNING]: Using a variable for a task's 'args' is unsafe in some situations
(see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-unsafe)
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
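The secret itself is hidden by no_log. A hedged sketch of the kind of task that produces this result, assuming the containers.podman collection is available (the name and data below are placeholders, not values from this run):
  - name: Manage each secret
    containers.podman.podman_secret:
      name: example-secret            # placeholder name
      data: "{{ example_secret_value }}"  # placeholder variable
      state: present
    no_log: true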
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Monday 07 July 2025 20:16:02 -0400 (0:00:00.674) 0:00:23.445 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Monday 07 July 2025 20:16:02 -0400 (0:00:00.044) 0:00:23.489 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:16:02 -0400 (0:00:00.070) 0:00:23.559 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:16:02 -0400 (0:00:00.037) 0:00:23.597 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:16:02 -0400 (0:00:00.035) 0:00:23.632 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:16:02 -0400 (0:00:00.043) 0:00:23.676 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:16:02 -0400 (0:00:00.033) 0:00:23.710 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:16:02 -0400 (0:00:00.032) 0:00:23.743 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:16:02 -0400 (0:00:00.032) 0:00:23.775 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:16:02 -0400 (0:00:00.035) 0:00:23.811 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:16:02 -0400 (0:00:00.053) 0:00:23.864 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:16:02 -0400 (0:00:00.053) 0:00:23.918 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:16:02 -0400 (0:00:00.039) 0:00:23.958 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:16:02 -0400 (0:00:00.044) 0:00:24.002 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14
Monday 07 July 2025 20:16:02 -0400 (0:00:00.041) 0:00:24.044 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20
Monday 07 July 2025 20:16:02 -0400 (0:00:00.047) 0:00:24.091 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:16:02 -0400 (0:00:00.095) 0:00:24.187 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:16:02 -0400 (0:00:00.030) 0:00:24.217 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:16:02 -0400 (0:00:00.032) 0:00:24.249 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25
Monday 07 July 2025 20:16:03 -0400 (0:00:00.035) 0:00:24.285 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
Monday 07 July 2025 20:16:03 -0400 (0:00:00.048) 0:00:24.333 ***********
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
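The secret payload is censored by no_log. On the managed node this step creates a podman secret for root; done by hand, the rough equivalent would be something like the following, with the name and file path purely illustrative since the real values are hidden here:
    podman secret create <secret-name> /path/to/secret-file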
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Monday 07 July 2025 20:16:03 -0400 (0:00:00.539) 0:00:24.873 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Monday 07 July 2025 20:16:03 -0400 (0:00:00.060) 0:00:24.933 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:16:03 -0400 (0:00:00.109) 0:00:25.042 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:16:03 -0400 (0:00:00.064) 0:00:25.107 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:16:03 -0400 (0:00:00.055) 0:00:25.162 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:16:03 -0400 (0:00:00.075) 0:00:25.238 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:16:04 -0400 (0:00:00.053) 0:00:25.292 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:16:04 -0400 (0:00:00.037) 0:00:25.330 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:16:04 -0400 (0:00:00.040) 0:00:25.371 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:16:04 -0400 (0:00:00.042) 0:00:25.413 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:16:04 -0400 (0:00:00.038) 0:00:25.451 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:16:04 -0400 (0:00:00.032) 0:00:25.484 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:16:04 -0400 (0:00:00.034) 0:00:25.518 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:16:04 -0400 (0:00:00.032) 0:00:25.551 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14
Monday 07 July 2025 20:16:04 -0400 (0:00:00.074) 0:00:25.625 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20
Monday 07 July 2025 20:16:04 -0400 (0:00:00.044) 0:00:25.670 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:16:04 -0400 (0:00:00.078) 0:00:25.748 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:16:04 -0400 (0:00:00.040) 0:00:25.788 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:16:04 -0400 (0:00:00.040) 0:00:25.829 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25
Monday 07 July 2025 20:16:04 -0400 (0:00:00.036) 0:00:25.865 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
Monday 07 July 2025 20:16:04 -0400 (0:00:00.032) 0:00:25.897 ***********
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184
Monday 07 July 2025 20:16:05 -0400 (0:00:00.500) 0:00:26.398 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191
Monday 07 July 2025 20:16:05 -0400 (0:00:00.033) 0:00:26.431 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 07 July 2025 20:16:05 -0400 (0:00:00.198) 0:00:26.629 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo.network",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress",
"__podman_quadlet_template_src": ""
},
"changed": false
}
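Unescaped, the __podman_quadlet_str above is the quadlet unit that the role installs as /etc/containers/systemd/quadlet-demo.network:
    [Network]
    Subnet=192.168.30.0/24
    Gateway=192.168.30.1
    Label=app=wordpress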
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 07 July 2025 20:16:05 -0400 (0:00:00.051) 0:00:26.681 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 07 July 2025 20:16:05 -0400 (0:00:00.042) 0:00:26.724 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 07 July 2025 20:16:05 -0400 (0:00:00.032) 0:00:26.756 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "network",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 07 July 2025 20:16:05 -0400 (0:00:00.045) 0:00:26.802 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:16:05 -0400 (0:00:00.057) 0:00:26.859 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:16:05 -0400 (0:00:00.089) 0:00:26.949 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:16:05 -0400 (0:00:00.038) 0:00:26.987 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:16:05 -0400 (0:00:00.051) 0:00:27.038 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1751933454.6873221,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "8bedde2dbca15219e1a3b95a68a8c0d26a92ba62",
"ctime": 1751933427.4892416,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 665568,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1748273472.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15496,
"uid": 0,
"version": "4278445899",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:16:06 -0400 (0:00:00.384) 0:00:27.423 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:16:06 -0400 (0:00:00.037) 0:00:27.461 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:16:06 -0400 (0:00:00.034) 0:00:27.495 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:16:06 -0400 (0:00:00.032) 0:00:27.528 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:16:06 -0400 (0:00:00.033) 0:00:27.561 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:16:06 -0400 (0:00:00.032) 0:00:27.594 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:16:06 -0400 (0:00:00.037) 0:00:27.631 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:16:06 -0400 (0:00:00.047) 0:00:27.679 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
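All of the subuid/subgid lookups above are skipped because __podman_user is root. For a rootless user the role would instead run getsubids to read the user's ID ranges; its output looks roughly like this (hypothetical user and ranges):
    getsubids someuser
    0: someuser 100000 65536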
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 07 July 2025 20:16:06 -0400 (0:00:00.056) 0:00:27.736 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-network.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
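The service name above follows the quadlet naming rule: a unit file NAME.network is rendered as NAME-network.service, and NAME.volume as NAME-volume.service. That is why the two quadlet files handled in this run map to the units started later:
    quadlet-demo.network       -> quadlet-demo-network.service
    quadlet-demo-mysql.volume  -> quadlet-demo-mysql-volume.service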
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 07 July 2025 20:16:06 -0400 (0:00:00.059) 0:00:27.795 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 07 July 2025 20:16:06 -0400 (0:00:00.040) 0:00:27.836 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_kube_yamls_raw | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88
Monday 07 July 2025 20:16:06 -0400 (0:00:00.042) 0:00:27.878 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106
Monday 07 July 2025 20:16:06 -0400 (0:00:00.078) 0:00:27.957 ***********
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113
Monday 07 July 2025 20:16:06 -0400 (0:00:00.039) 0:00:27.996 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117
Monday 07 July 2025 20:16:06 -0400 (0:00:00.029) 0:00:28.026 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Monday 07 July 2025 20:16:06 -0400 (0:00:00.145) 0:00:28.171 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:16:06 -0400 (0:00:00.092) 0:00:28.264 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:16:07 -0400 (0:00:00.051) 0:00:28.316 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:16:07 -0400 (0:00:00.052) 0:00:28.368 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Monday 07 July 2025 20:16:07 -0400 (0:00:00.050) 0:00:28.419 ***********
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Monday 07 July 2025 20:16:07 -0400 (0:00:00.051) 0:00:28.470 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Monday 07 July 2025 20:16:07 -0400 (0:00:00.053) 0:00:28.524 ***********
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 90,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50
Monday 07 July 2025 20:16:07 -0400 (0:00:00.440) 0:00:28.964 ***********
changed: [managed-node1] => {
"changed": true,
"checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0",
"dest": "/etc/containers/systemd/quadlet-demo.network",
"gid": 0,
"group": "root",
"md5sum": "061f3cf318cbd8ab5794bb1173831fb8",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 74,
"src": "/root/.ansible/tmp/ansible-tmp-1751933767.7445745-18733-5242759390053/.source.network",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62
Monday 07 July 2025 20:16:08 -0400 (0:00:00.774) 0:00:29.739 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75
Monday 07 July 2025 20:16:08 -0400 (0:00:00.051) 0:00:29.790 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_file is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87
Monday 07 July 2025 20:16:08 -0400 (0:00:00.053) 0:00:29.844 ***********
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
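The daemon reload is what triggers the systemd quadlet generator, which translates /etc/containers/systemd/quadlet-demo.network into the transient unit under /run/systemd/generator shown in the next task. To inspect the generated unit by hand one could run, for example:
    systemctl daemon-reload
    systemctl cat quadlet-demo-network.service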
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115
Monday 07 July 2025 20:16:09 -0400 (0:00:00.728) 0:00:30.572 ***********
changed: [managed-node1] => {
"changed": true,
"name": "quadlet-demo-network.service",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "-.mount basic.target network-online.target system.slice sysinit.target systemd-journald.socket",
"AllowIsolate": "no",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroupId": "0",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "quadlet-demo-network.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo-network.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "18446744073709551615",
"IOReadOperations": "18446744073709551615",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "18446744073709551615",
"IOWriteOperations": "18446744073709551615",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo-network.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13688",
"LimitNPROCSoft": "13688",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13688",
"LimitSIGPENDINGSoft": "13688",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "infinity",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-network.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "yes",
"RemoveIPC": "no",
"Requires": "system.slice -.mount sysinit.target",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartKillSignal": "15",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo.network",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-network",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "21900",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "infinity",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "oneshot",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "infinity"
}
}
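Stripped of the systemd bookkeeping, starting quadlet-demo-network.service runs the one-shot command shown in ExecStart above, i.e. roughly:
    /usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo
Because of --ignore and RemainAfterExit=yes, re-running the unit is harmless if the network already exists.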
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131
Monday 07 July 2025 20:16:09 -0400 (0:00:00.601) 0:00:31.174 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_service_started is changed",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 07 July 2025 20:16:09 -0400 (0:00:00.051) 0:00:31.226 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo-mysql.volume",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Volume]",
"__podman_quadlet_template_src": ""
},
"changed": false
}
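Unescaped, this quadlet is just a one-line unit installed as /etc/containers/systemd/quadlet-demo-mysql.volume:
    [Volume]
which matches the 9-byte file copied later in this play (8 characters plus, presumably, a trailing newline).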
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 07 July 2025 20:16:09 -0400 (0:00:00.045) 0:00:31.272 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 07 July 2025 20:16:10 -0400 (0:00:00.040) 0:00:31.312 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 07 July 2025 20:16:10 -0400 (0:00:00.032) 0:00:31.344 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo-mysql",
"__podman_quadlet_type": "volume",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 07 July 2025 20:16:10 -0400 (0:00:00.049) 0:00:31.394 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:16:10 -0400 (0:00:00.127) 0:00:31.521 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:16:10 -0400 (0:00:00.057) 0:00:31.579 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:16:10 -0400 (0:00:00.063) 0:00:31.643 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:16:10 -0400 (0:00:00.072) 0:00:31.715 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1751933454.6873221,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "8bedde2dbca15219e1a3b95a68a8c0d26a92ba62",
"ctime": 1751933427.4892416,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 665568,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1748273472.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15496,
"uid": 0,
"version": "4278445899",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:16:10 -0400 (0:00:00.388) 0:00:32.103 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:16:10 -0400 (0:00:00.034) 0:00:32.138 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:16:10 -0400 (0:00:00.034) 0:00:32.173 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:16:10 -0400 (0:00:00.041) 0:00:32.214 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:16:11 -0400 (0:00:00.065) 0:00:32.280 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:16:11 -0400 (0:00:00.057) 0:00:32.337 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:16:11 -0400 (0:00:00.057) 0:00:32.394 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:16:11 -0400 (0:00:00.057) 0:00:32.452 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 07 July 2025 20:16:11 -0400 (0:00:00.058) 0:00:32.510 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-mysql-volume.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 07 July 2025 20:16:11 -0400 (0:00:00.089) 0:00:32.599 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 07 July 2025 20:16:11 -0400 (0:00:00.060) 0:00:32.660 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_kube_yamls_raw | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88
Monday 07 July 2025 20:16:11 -0400 (0:00:00.058) 0:00:32.719 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106
Monday 07 July 2025 20:16:11 -0400 (0:00:00.126) 0:00:32.846 ***********
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113
Monday 07 July 2025 20:16:11 -0400 (0:00:00.064) 0:00:32.910 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117
Monday 07 July 2025 20:16:11 -0400 (0:00:00.053) 0:00:32.964 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Monday 07 July 2025 20:16:11 -0400 (0:00:00.167) 0:00:33.131 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:16:11 -0400 (0:00:00.094) 0:00:33.226 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:16:12 -0400 (0:00:00.056) 0:00:33.282 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:16:12 -0400 (0:00:00.060) 0:00:33.342 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Monday 07 July 2025 20:16:12 -0400 (0:00:00.051) 0:00:33.394 ***********
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Monday 07 July 2025 20:16:12 -0400 (0:00:00.050) 0:00:33.444 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Monday 07 July 2025 20:16:12 -0400 (0:00:00.054) 0:00:33.499 ***********
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 118,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50
Monday 07 July 2025 20:16:12 -0400 (0:00:00.433) 0:00:33.932 ***********
changed: [managed-node1] => {
"changed": true,
"checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a",
"dest": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"gid": 0,
"group": "root",
"md5sum": "5ddd03a022aeb4502d9bc8ce436b4233",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 9,
"src": "/root/.ansible/tmp/ansible-tmp-1751933772.715237-18900-74746594643675/.source.volume",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62
Monday 07 July 2025 20:16:13 -0400 (0:00:00.750) 0:00:34.683 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75
Monday 07 July 2025 20:16:13 -0400 (0:00:00.058) 0:00:34.741 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_file is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87
Monday 07 July 2025 20:16:13 -0400 (0:00:00.052) 0:00:34.794 ***********
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115
Monday 07 July 2025 20:16:14 -0400 (0:00:00.737) 0:00:35.532 ***********
changed: [managed-node1] => {
"changed": true,
"name": "quadlet-demo-mysql-volume.service",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "systemd-journald.socket network-online.target basic.target sysinit.target -.mount system.slice",
"AllowIsolate": "no",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroupId": "0",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "quadlet-demo-mysql-volume.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "18446744073709551615",
"IOReadOperations": "18446744073709551615",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "18446744073709551615",
"IOWriteOperations": "18446744073709551615",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo-mysql-volume.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13688",
"LimitNPROCSoft": "13688",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13688",
"LimitSIGPENDINGSoft": "13688",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "infinity",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-mysql-volume.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "yes",
"RemoveIPC": "no",
"Requires": "sysinit.target -.mount system.slice",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartKillSignal": "15",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-mysql-volume",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "21900",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "infinity",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "oneshot",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "infinity"
}
}
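The generated quadlet-demo-mysql-volume.service above is a oneshot unit whose ExecStart simply runs podman volume create --ignore systemd-quadlet-demo-mysql (see the ExecStart/ExecStartEx properties). As a hedged sketch, not part of the recorded run, the result could be checked on the managed node with commands along these lines:

    # Show the unit the quadlet generator wrote under /run/systemd/generator
    systemctl cat quadlet-demo-mysql-volume.service

    # Confirm the named volume exists once the service has run
    podman volume ls --filter name=systemd-quadlet-demo-mysql
    podman volume inspect systemd-quadlet-demo-mysql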
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131
Monday 07 July 2025 20:16:14 -0400 (0:00:00.589) 0:00:36.121 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_service_started is changed",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 07 July 2025 20:16:14 -0400 (0:00:00.042) 0:00:36.164 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n",
"__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2"
},
"changed": false
}
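With its escaped newlines expanded, the __podman_quadlet_str value above is the quadlet unit that the role later writes to /etc/containers/systemd/quadlet-demo-mysql.container:

    [Install]
    WantedBy=default.target

    [Container]
    Image=quay.io/linux-system-roles/mysql:5.6
    ContainerName=quadlet-demo-mysql
    Volume=quadlet-demo-mysql.volume:/var/lib/mysql
    Volume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z
    Network=quadlet-demo.network
    Secret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD
    HealthCmd=/bin/true
    HealthOnFailure=kill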
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 07 July 2025 20:16:14 -0400 (0:00:00.087) 0:00:36.252 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 07 July 2025 20:16:15 -0400 (0:00:00.040) 0:00:36.293 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_str",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 07 July 2025 20:16:15 -0400 (0:00:00.040) 0:00:36.333 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo-mysql",
"__podman_quadlet_type": "container",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 07 July 2025 20:16:15 -0400 (0:00:00.083) 0:00:36.417 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:16:15 -0400 (0:00:00.157) 0:00:36.574 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:16:15 -0400 (0:00:00.045) 0:00:36.620 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:16:15 -0400 (0:00:00.043) 0:00:36.663 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:16:15 -0400 (0:00:00.054) 0:00:36.718 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1751933454.6873221,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "8bedde2dbca15219e1a3b95a68a8c0d26a92ba62",
"ctime": 1751933427.4892416,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 665568,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1748273472.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15496,
"uid": 0,
"version": "4278445899",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:16:15 -0400 (0:00:00.390) 0:00:37.108 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:16:15 -0400 (0:00:00.054) 0:00:37.163 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:16:15 -0400 (0:00:00.057) 0:00:37.221 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:16:15 -0400 (0:00:00.055) 0:00:37.276 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:16:16 -0400 (0:00:00.056) 0:00:37.332 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:16:16 -0400 (0:00:00.054) 0:00:37.387 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:16:16 -0400 (0:00:00.066) 0:00:37.454 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:16:16 -0400 (0:00:00.054) 0:00:37.508 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 07 July 2025 20:16:16 -0400 (0:00:00.056) 0:00:37.564 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [
"quay.io/linux-system-roles/mysql:5.6"
],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-mysql.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 07 July 2025 20:16:16 -0400 (0:00:00.090) 0:00:37.655 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 07 July 2025 20:16:16 -0400 (0:00:00.057) 0:00:37.712 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_kube_yamls_raw | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88
Monday 07 July 2025 20:16:16 -0400 (0:00:00.056) 0:00:37.768 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [
"quay.io/linux-system-roles/mysql:5.6"
],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container",
"__podman_volumes": [
"/tmp/quadlet_demo"
]
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106
Monday 07 July 2025 20:16:16 -0400 (0:00:00.120) 0:00:37.889 ***********
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113
Monday 07 July 2025 20:16:16 -0400 (0:00:00.066) 0:00:37.956 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117
Monday 07 July 2025 20:16:16 -0400 (0:00:00.049) 0:00:38.005 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Monday 07 July 2025 20:16:16 -0400 (0:00:00.130) 0:00:38.136 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:16:16 -0400 (0:00:00.058) 0:00:38.195 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:16:16 -0400 (0:00:00.031) 0:00:38.226 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:16:16 -0400 (0:00:00.030) 0:00:38.257 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Monday 07 July 2025 20:16:17 -0400 (0:00:00.031) 0:00:38.289 ***********
changed: [managed-node1] => (item=/tmp/quadlet_demo) => {
"ansible_loop_var": "item",
"changed": true,
"gid": 0,
"group": "root",
"item": "/tmp/quadlet_demo",
"mode": "0777",
"owner": "root",
"path": "/tmp/quadlet_demo",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Monday 07 July 2025 20:16:17 -0400 (0:00:00.423) 0:00:38.712 ***********
changed: [managed-node1] => (item=None) => {
"attempts": 1,
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Monday 07 July 2025 20:16:24 -0400 (0:00:06.671) 0:00:45.383 ***********
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 151,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50
Monday 07 July 2025 20:16:24 -0400 (0:00:00.442) 0:00:45.826 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_quadlet_file_src | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62
Monday 07 July 2025 20:16:24 -0400 (0:00:00.059) 0:00:45.886 ***********
changed: [managed-node1] => {
"changed": true,
"checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4",
"dest": "/etc/containers/systemd/quadlet-demo-mysql.container",
"gid": 0,
"group": "root",
"md5sum": "341b473056d2a5dfa35970b0d2e23a5d",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 363,
"src": "/root/.ansible/tmp/ansible-tmp-1751933784.6711764-19256-243645638611479/.source.container",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75
Monday 07 July 2025 20:16:25 -0400 (0:00:00.733) 0:00:46.619 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_content is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87
Monday 07 July 2025 20:16:25 -0400 (0:00:00.064) 0:00:46.683 ***********
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115
Monday 07 July 2025 20:16:26 -0400 (0:00:00.704) 0:00:47.388 ***********
changed: [managed-node1] => {
"changed": true,
"name": "quadlet-demo-mysql.service",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "system.slice -.mount basic.target sysinit.target quadlet-demo-mysql-volume.service tmp.mount network-online.target systemd-journald.socket quadlet-demo-network.service",
"AllowIsolate": "no",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "multi-user.target shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroupId": "0",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "yes",
"DelegateControllers": "cpu cpuacct cpuset io blkio memory devices pids bpf-firewall bpf-devices bpf-foreign bpf-socket-bind bpf-restrict-network-interfaces",
"Description": "quadlet-demo-mysql.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "18446744073709551615",
"IOReadOperations": "18446744073709551615",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "18446744073709551615",
"IOWriteOperations": "18446744073709551615",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo-mysql.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13688",
"LimitNPROCSoft": "13688",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13688",
"LimitSIGPENDINGSoft": "13688",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "infinity",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-mysql.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "all",
"OOMPolicy": "continue",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "quadlet-demo-network.service quadlet-demo-mysql-volume.service sysinit.target system.slice -.mount",
"RequiresMountsFor": "/run/containers /tmp/quadlet_demo",
"Restart": "no",
"RestartKillSignal": "15",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-mysql",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "21900",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "infinity"
}
}
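The ExecStart/ExecStop properties above show how the [Container] quadlet unit is translated into a podman run invocation (--rm --cgroups=split --sdnotify=conmon, attached to the systemd-quadlet-demo network and the systemd-quadlet-demo-mysql volume) with a matching podman rm on stop. A hedged verification sketch, not part of the recorded run:

    # Service state as systemd sees it, plus the generated unit and its quadlet source
    systemctl status quadlet-demo-mysql.service
    systemctl cat quadlet-demo-mysql.service
    cat /etc/containers/systemd/quadlet-demo-mysql.container

    # The container started by the unit
    podman ps --filter name=quadlet-demo-mysql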
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131
Monday 07 July 2025 20:16:26 -0400 (0:00:00.874) 0:00:48.263 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_service_started is changed",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 07 July 2025 20:16:27 -0400 (0:00:00.055) 0:00:48.318 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "envoy-proxy-configmap.yml",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n name: envoy-proxy-config\ndata:\n envoy.yaml: |\n admin:\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 9901\n\n static_resources:\n listeners:\n - name: listener_0\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 8080\n filter_chains:\n - filters:\n - name: envoy.filters.network.http_connection_manager\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n stat_prefix: ingress_http\n codec_type: AUTO\n route_config:\n name: local_route\n virtual_hosts:\n - name: local_service\n domains: [\"*\"]\n routes:\n - match:\n prefix: \"/\"\n route:\n cluster: backend\n http_filters:\n - name: envoy.filters.http.router\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n transport_socket:\n name: envoy.transport_sockets.tls\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n common_tls_context:\n tls_certificates:\n - certificate_chain:\n filename: /etc/envoy-certificates/certificate.pem\n private_key:\n filename: /etc/envoy-certificates/certificate.key\n clusters:\n - name: backend\n connect_timeout: 5s\n type: STATIC\n dns_refresh_rate: 1800s\n lb_policy: ROUND_ROBIN\n load_assignment:\n cluster_name: backend\n endpoints:\n - lb_endpoints:\n - endpoint:\n address:\n socket_address:\n address: 127.0.0.1\n port_value: 80",
"__podman_quadlet_template_src": ""
},
"changed": false
}
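Unescaped, the __podman_quadlet_str above is a Kubernetes ConfigMap named envoy-proxy-config. Its envoy.yaml key configures the Envoy admin interface on port 9901, a TLS listener on port 8080 using the certificate pair under /etc/envoy-certificates, and a single static backend cluster pointing at 127.0.0.1:80. An abridged re-rendering (indentation reconstructed; filter and TLS details summarized as comments):

    ---
    apiVersion: v1
    kind: ConfigMap
    metadata:
      name: envoy-proxy-config
    data:
      envoy.yaml: |
        admin:
          address:
            socket_address: {address: 0.0.0.0, port_value: 9901}
        static_resources:
          listeners:
          - name: listener_0
            address:
              socket_address: {address: 0.0.0.0, port_value: 8080}
            # filter_chains: http_connection_manager routing all domains ("*")
            # to the backend cluster; transport_socket: TLS using
            # /etc/envoy-certificates/certificate.pem and certificate.key
          clusters:
          - name: backend
            connect_timeout: 5s
            type: STATIC
            lb_policy: ROUND_ROBIN
            # load_assignment: one static endpoint, 127.0.0.1:80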
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 07 July 2025 20:16:27 -0400 (0:00:00.072) 0:00:48.391 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 07 July 2025 20:16:27 -0400 (0:00:00.065) 0:00:48.457 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 07 July 2025 20:16:27 -0400 (0:00:00.053) 0:00:48.510 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "envoy-proxy-configmap",
"__podman_quadlet_type": "yml",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 07 July 2025 20:16:27 -0400 (0:00:00.131) 0:00:48.641 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:16:27 -0400 (0:00:00.118) 0:00:48.759 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:16:27 -0400 (0:00:00.058) 0:00:48.818 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:16:27 -0400 (0:00:00.059) 0:00:48.877 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:16:27 -0400 (0:00:00.049) 0:00:48.927 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1751933454.6873221,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "8bedde2dbca15219e1a3b95a68a8c0d26a92ba62",
"ctime": 1751933427.4892416,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 665568,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1748273472.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15496,
"uid": 0,
"version": "4278445899",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:16:28 -0400 (0:00:00.440) 0:00:49.368 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:16:28 -0400 (0:00:00.063) 0:00:49.431 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:16:28 -0400 (0:00:00.052) 0:00:49.484 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:16:28 -0400 (0:00:00.059) 0:00:49.544 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:16:28 -0400 (0:00:00.056) 0:00:49.601 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:16:28 -0400 (0:00:00.040) 0:00:49.641 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:16:28 -0400 (0:00:00.042) 0:00:49.683 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:16:28 -0400 (0:00:00.054) 0:00:49.737 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 07 July 2025 20:16:28 -0400 (0:00:00.039) 0:00:49.777 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 07 July 2025 20:16:28 -0400 (0:00:00.057) 0:00:49.835 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 07 July 2025 20:16:28 -0400 (0:00:00.037) 0:00:49.872 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_kube_yamls_raw | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88
Monday 07 July 2025 20:16:28 -0400 (0:00:00.035) 0:00:49.908 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106
Monday 07 July 2025 20:16:28 -0400 (0:00:00.103) 0:00:50.011 ***********
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113
Monday 07 July 2025 20:16:28 -0400 (0:00:00.065) 0:00:50.076 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117
Monday 07 July 2025 20:16:28 -0400 (0:00:00.109) 0:00:50.186 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Monday 07 July 2025 20:16:29 -0400 (0:00:00.116) 0:00:50.303 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:16:29 -0400 (0:00:00.094) 0:00:50.397 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:16:29 -0400 (0:00:00.055) 0:00:50.453 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:16:29 -0400 (0:00:00.051) 0:00:50.504 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Monday 07 July 2025 20:16:29 -0400 (0:00:00.053) 0:00:50.557 ***********
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Monday 07 July 2025 20:16:29 -0400 (0:00:00.050) 0:00:50.608 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Monday 07 July 2025 20:16:29 -0400 (0:00:00.054) 0:00:50.663 ***********
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 187,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50
Monday 07 July 2025 20:16:29 -0400 (0:00:00.449) 0:00:51.112 ***********
changed: [managed-node1] => {
"changed": true,
"checksum": "d681c7d56f912150d041873e880818b22a90c188",
"dest": "/etc/containers/systemd/envoy-proxy-configmap.yml",
"gid": 0,
"group": "root",
"md5sum": "aec75d972c231aac004e1338934544cf",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 2102,
"src": "/root/.ansible/tmp/ansible-tmp-1751933789.8840272-19439-56658192497812/.source.yml",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62
Monday 07 July 2025 20:16:30 -0400 (0:00:00.742) 0:00:51.855 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75
Monday 07 July 2025 20:16:30 -0400 (0:00:00.063) 0:00:51.918 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_file is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87
Monday 07 July 2025 20:16:30 -0400 (0:00:00.060) 0:00:51.979 ***********
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115
Monday 07 July 2025 20:16:31 -0400 (0:00:00.728) 0:00:52.708 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_service_name | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131
Monday 07 July 2025 20:16:31 -0400 (0:00:00.039) 0:00:52.747 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_service_name | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 07 July 2025 20:16:31 -0400 (0:00:00.039) 0:00:52.786 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n name: wp-pv-claim\n labels:\n app: wordpress\nspec:\n accessModes:\n - ReadWriteOnce\n resources:\n requests:\n storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: quadlet-demo\nspec:\n containers:\n - name: wordpress\n image: quay.io/linux-system-roles/wordpress:4.8-apache\n env:\n - name: WORDPRESS_DB_HOST\n value: quadlet-demo-mysql\n - name: WORDPRESS_DB_PASSWORD\n valueFrom:\n secretKeyRef:\n name: mysql-root-password-kube\n key: password\n volumeMounts:\n - name: wordpress-persistent-storage\n mountPath: /var/www/html\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n - name: envoy\n image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n volumeMounts:\n - name: config-volume\n mountPath: /etc/envoy\n - name: certificates\n mountPath: /etc/envoy-certificates\n env:\n - name: ENVOY_UID\n value: \"0\"\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n volumes:\n - name: config-volume\n configMap:\n name: envoy-proxy-config\n - name: certificates\n secret:\n secretName: envoy-certificates\n - name: wordpress-persistent-storage\n persistentVolumeClaim:\n claimName: wp-pv-claim\n - name: www # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3\n - name: create # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3-create\n",
"__podman_quadlet_template_src": "quadlet-demo.yml.j2"
},
"changed": false
}
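The __podman_quadlet_str here is the quadlet-demo.yml manifest: a 20Gi PersistentVolumeClaim (wp-pv-claim) plus a Pod (quadlet-demo) running a wordpress container backed by that claim and an envoy sidecar that mounts the envoy-proxy-config ConfigMap and the envoy-certificates secret. An abridged re-rendering (indentation reconstructed; the per-container resource requests/limits and the unused hostPath test volumes are omitted):

    ---
    apiVersion: v1
    kind: PersistentVolumeClaim
    metadata:
      name: wp-pv-claim
      labels:
        app: wordpress
    spec:
      accessModes:
      - ReadWriteOnce
      resources:
        requests:
          storage: 20Gi
    ---
    apiVersion: v1
    kind: Pod
    metadata:
      name: quadlet-demo
    spec:
      containers:
      - name: wordpress
        image: quay.io/linux-system-roles/wordpress:4.8-apache
        env:
        - name: WORDPRESS_DB_HOST
          value: quadlet-demo-mysql
        - name: WORDPRESS_DB_PASSWORD
          valueFrom:
            secretKeyRef:
              name: mysql-root-password-kube
              key: password
        volumeMounts:
        - name: wordpress-persistent-storage
          mountPath: /var/www/html
      - name: envoy
        image: quay.io/linux-system-roles/envoyproxy:v1.25.0
        volumeMounts:
        - name: config-volume
          mountPath: /etc/envoy
        - name: certificates
          mountPath: /etc/envoy-certificates
        env:
        - name: ENVOY_UID
          value: "0"
      volumes:
      - name: config-volume
        configMap:
          name: envoy-proxy-config
      - name: certificates
        secret:
          secretName: envoy-certificates
      - name: wordpress-persistent-storage
        persistentVolumeClaim:
          claimName: wp-pv-claim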
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 07 July 2025 20:16:31 -0400 (0:00:00.148) 0:00:52.935 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 07 July 2025 20:16:31 -0400 (0:00:00.086) 0:00:53.021 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_str",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 07 July 2025 20:16:31 -0400 (0:00:00.069) 0:00:53.091 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "yml",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 07 July 2025 20:16:31 -0400 (0:00:00.141) 0:00:53.232 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:16:32 -0400 (0:00:00.104) 0:00:53.336 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:16:32 -0400 (0:00:00.063) 0:00:53.400 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:16:32 -0400 (0:00:00.059) 0:00:53.459 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:16:32 -0400 (0:00:00.075) 0:00:53.534 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1751933454.6873221,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "8bedde2dbca15219e1a3b95a68a8c0d26a92ba62",
"ctime": 1751933427.4892416,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 665568,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1748273472.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15496,
"uid": 0,
"version": "4278445899",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:16:32 -0400 (0:00:00.420) 0:00:53.955 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:16:32 -0400 (0:00:00.043) 0:00:53.999 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:16:32 -0400 (0:00:00.033) 0:00:54.032 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:16:32 -0400 (0:00:00.042) 0:00:54.075 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:16:32 -0400 (0:00:00.048) 0:00:54.124 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:16:32 -0400 (0:00:00.039) 0:00:54.163 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:16:32 -0400 (0:00:00.034) 0:00:54.197 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:16:32 -0400 (0:00:00.033) 0:00:54.231 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
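Note: all of the subordinate-ID checks in this block are skipped because __podman_user is root. For a rootless user the role would query the subuid/subgid ranges through the getsubids binary located above; a minimal manual equivalent would look roughly like this (hypothetical user name, shown only as a sketch):

  getsubids someuser      # list subordinate UID ranges for the user
  getsubids -g someuser   # list subordinate GID ranges for the user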
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 07 July 2025 20:16:32 -0400 (0:00:00.033) 0:00:54.265 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
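Note: the empty __podman_service_name here is expected. A plain .yml file under /etc/containers/systemd is not a quadlet unit type and generates no systemd service of its own, which is why the Start/Restart service tasks for this file are skipped below. The file is only consumed by the quadlet-demo.kube unit processed later in this run, which references it with:

  [Kube]
  Yaml=quadlet-demo.yml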
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 07 July 2025 20:16:33 -0400 (0:00:00.053) 0:00:54.318 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 07 July 2025 20:16:33 -0400 (0:00:00.035) 0:00:54.354 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_kube_yamls_raw | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88
Monday 07 July 2025 20:16:33 -0400 (0:00:00.035) 0:00:54.389 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106
Monday 07 July 2025 20:16:33 -0400 (0:00:00.077) 0:00:54.466 ***********
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113
Monday 07 July 2025 20:16:33 -0400 (0:00:00.039) 0:00:54.506 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117
Monday 07 July 2025 20:16:33 -0400 (0:00:00.078) 0:00:54.585 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Monday 07 July 2025 20:16:33 -0400 (0:00:00.070) 0:00:54.655 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:16:33 -0400 (0:00:00.062) 0:00:54.717 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:16:33 -0400 (0:00:00.052) 0:00:54.770 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:16:33 -0400 (0:00:00.051) 0:00:54.822 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Monday 07 July 2025 20:16:33 -0400 (0:00:00.053) 0:00:54.876 ***********
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Monday 07 July 2025 20:16:33 -0400 (0:00:00.048) 0:00:54.925 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Monday 07 July 2025 20:16:33 -0400 (0:00:00.052) 0:00:54.977 ***********
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 4096,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50
Monday 07 July 2025 20:16:34 -0400 (0:00:00.527) 0:00:55.504 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_quadlet_file_src | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62
Monday 07 July 2025 20:16:34 -0400 (0:00:00.046) 0:00:55.550 ***********
changed: [managed-node1] => {
"changed": true,
"checksum": "998dccde0483b1654327a46ddd89cbaa47650370",
"dest": "/etc/containers/systemd/quadlet-demo.yml",
"gid": 0,
"group": "root",
"md5sum": "fd890594adfc24339cb9cdc5e7b19a66",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 1605,
"src": "/root/.ansible/tmp/ansible-tmp-1751933794.3273475-19728-80933705754489/.source.yml",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75
Monday 07 July 2025 20:16:34 -0400 (0:00:00.693) 0:00:56.244 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_content is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87
Monday 07 July 2025 20:16:34 -0400 (0:00:00.032) 0:00:56.277 ***********
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115
Monday 07 July 2025 20:16:35 -0400 (0:00:00.704) 0:00:56.982 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_service_name | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131
Monday 07 July 2025 20:16:35 -0400 (0:00:00.063) 0:00:57.045 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_service_name | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 07 July 2025 20:16:35 -0400 (0:00:00.064) 0:00:57.109 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo.kube",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml",
"__podman_quadlet_template_src": ""
},
"changed": false
}
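Note for readability: the __podman_quadlet_str fact above is the content of quadlet-demo.kube that the role installs as /etc/containers/systemd/quadlet-demo.kube. Unescaped, it reads:

  [Install]
  WantedBy=default.target

  [Unit]
  Requires=quadlet-demo-mysql.service
  After=quadlet-demo-mysql.service

  [Kube]
  # Point to the yaml file in the same directory
  Yaml=quadlet-demo.yml
  # Use the quadlet-demo network
  Network=quadlet-demo.network
  # Publish the envoy proxy data port
  PublishPort=8000:8080
  # Publish the envoy proxy admin port
  PublishPort=9000:9901
  # Use the envoy proxy config map in the same directory
  ConfigMap=envoy-proxy-configmap.yml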
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 07 July 2025 20:16:35 -0400 (0:00:00.073) 0:00:57.183 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 07 July 2025 20:16:35 -0400 (0:00:00.069) 0:00:57.253 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 07 July 2025 20:16:36 -0400 (0:00:00.041) 0:00:57.294 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "kube",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 07 July 2025 20:16:36 -0400 (0:00:00.127) 0:00:57.421 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:16:36 -0400 (0:00:00.088) 0:00:57.510 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:16:36 -0400 (0:00:00.041) 0:00:57.551 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:16:36 -0400 (0:00:00.035) 0:00:57.587 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:16:36 -0400 (0:00:00.045) 0:00:57.633 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1751933454.6873221,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "8bedde2dbca15219e1a3b95a68a8c0d26a92ba62",
"ctime": 1751933427.4892416,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 665568,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1748273472.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15496,
"uid": 0,
"version": "4278445899",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:16:36 -0400 (0:00:00.425) 0:00:58.058 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:16:36 -0400 (0:00:00.058) 0:00:58.117 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:16:36 -0400 (0:00:00.056) 0:00:58.173 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:16:36 -0400 (0:00:00.054) 0:00:58.228 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:16:37 -0400 (0:00:00.056) 0:00:58.285 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:16:37 -0400 (0:00:00.043) 0:00:58.328 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:16:37 -0400 (0:00:00.045) 0:00:58.374 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:16:37 -0400 (0:00:00.055) 0:00:58.429 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 07 July 2025 20:16:37 -0400 (0:00:00.056) 0:00:58.486 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": [
"quadlet-demo.yml"
],
"__podman_service_name": "quadlet-demo.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 07 July 2025 20:16:37 -0400 (0:00:00.069) 0:00:58.556 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 07 July 2025 20:16:37 -0400 (0:00:00.041) 0:00:58.598 ***********
ok: [managed-node1] => {
"changed": false,
"content": "LS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBlcnNpc3RlbnRWb2x1bWVDbGFpbQptZXRhZGF0YToKICBuYW1lOiB3cC1wdi1jbGFpbQogIGxhYmVsczoKICAgIGFwcDogd29yZHByZXNzCnNwZWM6CiAgYWNjZXNzTW9kZXM6CiAgLSBSZWFkV3JpdGVPbmNlCiAgcmVzb3VyY2VzOgogICAgcmVxdWVzdHM6CiAgICAgIHN0b3JhZ2U6IDIwR2kKLS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBvZAptZXRhZGF0YToKICBuYW1lOiBxdWFkbGV0LWRlbW8Kc3BlYzoKICBjb250YWluZXJzOgogIC0gbmFtZTogd29yZHByZXNzCiAgICBpbWFnZTogcXVheS5pby9saW51eC1zeXN0ZW0tcm9sZXMvd29yZHByZXNzOjQuOC1hcGFjaGUKICAgIGVudjoKICAgIC0gbmFtZTogV09SRFBSRVNTX0RCX0hPU1QKICAgICAgdmFsdWU6IHF1YWRsZXQtZGVtby1teXNxbAogICAgLSBuYW1lOiBXT1JEUFJFU1NfREJfUEFTU1dPUkQKICAgICAgdmFsdWVGcm9tOgogICAgICAgIHNlY3JldEtleVJlZjoKICAgICAgICAgIG5hbWU6IG15c3FsLXJvb3QtcGFzc3dvcmQta3ViZQogICAgICAgICAga2V5OiBwYXNzd29yZAogICAgdm9sdW1lTW91bnRzOgogICAgLSBuYW1lOiB3b3JkcHJlc3MtcGVyc2lzdGVudC1zdG9yYWdlCiAgICAgIG1vdW50UGF0aDogL3Zhci93d3cvaHRtbAogICAgcmVzb3VyY2VzOgogICAgICByZXF1ZXN0czoKICAgICAgICBtZW1vcnk6ICI2NE1pIgogICAgICAgIGNwdTogIjI1MG0iCiAgICAgIGxpbWl0czoKICAgICAgICBtZW1vcnk6ICIxMjhNaSIKICAgICAgICBjcHU6ICI1MDBtIgogIC0gbmFtZTogZW52b3kKICAgIGltYWdlOiBxdWF5LmlvL2xpbnV4LXN5c3RlbS1yb2xlcy9lbnZveXByb3h5OnYxLjI1LjAKICAgIHZvbHVtZU1vdW50czoKICAgIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgICBtb3VudFBhdGg6IC9ldGMvZW52b3kKICAgIC0gbmFtZTogY2VydGlmaWNhdGVzCiAgICAgIG1vdW50UGF0aDogL2V0Yy9lbnZveS1jZXJ0aWZpY2F0ZXMKICAgIGVudjoKICAgIC0gbmFtZTogRU5WT1lfVUlECiAgICAgIHZhbHVlOiAiMCIKICAgIHJlc291cmNlczoKICAgICAgcmVxdWVzdHM6CiAgICAgICAgbWVtb3J5OiAiNjRNaSIKICAgICAgICBjcHU6ICIyNTBtIgogICAgICBsaW1pdHM6CiAgICAgICAgbWVtb3J5OiAiMTI4TWkiCiAgICAgICAgY3B1OiAiNTAwbSIKICB2b2x1bWVzOgogIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgY29uZmlnTWFwOgogICAgICBuYW1lOiBlbnZveS1wcm94eS1jb25maWcKICAtIG5hbWU6IGNlcnRpZmljYXRlcwogICAgc2VjcmV0OgogICAgICBzZWNyZXROYW1lOiBlbnZveS1jZXJ0aWZpY2F0ZXMKICAtIG5hbWU6IHdvcmRwcmVzcy1wZXJzaXN0ZW50LXN0b3JhZ2UKICAgIHBlcnNpc3RlbnRWb2x1bWVDbGFpbToKICAgICAgY2xhaW1OYW1lOiB3cC1wdi1jbGFpbQogIC0gbmFtZTogd3d3ICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMwogIC0gbmFtZTogY3JlYXRlICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMy1jcmVhdGUK",
"encoding": "base64",
"source": "/etc/containers/systemd/quadlet-demo.yml"
}
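Note: the slurped content is simply the base64 encoding of the quadlet-demo.yml written earlier; the role decodes it to work out which images to pull and which hostPath directories to create (see the facts set in the next task). To eyeball it manually one could decode the content field, for example (a sketch, assuming the task result were saved as slurp.json and jq were available):

  jq -r .content slurp.json | base64 -d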
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88
Monday 07 July 2025 20:16:37 -0400 (0:00:00.368) 0:00:58.967 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [
"quay.io/linux-system-roles/wordpress:4.8-apache",
"quay.io/linux-system-roles/envoyproxy:v1.25.0"
],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube",
"__podman_volumes": [
"/tmp/httpd3",
"/tmp/httpd3-create"
]
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106
Monday 07 July 2025 20:16:37 -0400 (0:00:00.105) 0:00:59.072 ***********
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113
Monday 07 July 2025 20:16:37 -0400 (0:00:00.040) 0:00:59.112 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117
Monday 07 July 2025 20:16:37 -0400 (0:00:00.072) 0:00:59.185 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Monday 07 July 2025 20:16:38 -0400 (0:00:00.098) 0:00:59.284 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:16:38 -0400 (0:00:00.094) 0:00:59.378 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:16:38 -0400 (0:00:00.053) 0:00:59.431 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:16:38 -0400 (0:00:00.053) 0:00:59.485 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Monday 07 July 2025 20:16:38 -0400 (0:00:00.055) 0:00:59.541 ***********
changed: [managed-node1] => (item=/tmp/httpd3) => {
"ansible_loop_var": "item",
"changed": true,
"gid": 0,
"group": "root",
"item": "/tmp/httpd3",
"mode": "0755",
"owner": "root",
"path": "/tmp/httpd3",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
changed: [managed-node1] => (item=/tmp/httpd3-create) => {
"ansible_loop_var": "item",
"changed": true,
"gid": 0,
"group": "root",
"item": "/tmp/httpd3-create",
"mode": "0755",
"owner": "root",
"path": "/tmp/httpd3-create",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Monday 07 July 2025 20:16:39 -0400 (0:00:00.810) 0:01:00.351 ***********
changed: [managed-node1] => (item=None) => {
"attempts": 1,
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed-node1] => (item=None) => {
"attempts": 1,
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Monday 07 July 2025 20:16:54 -0400 (0:00:15.441) 0:01:15.793 ***********
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 4096,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50
Monday 07 July 2025 20:16:54 -0400 (0:00:00.405) 0:01:16.198 ***********
changed: [managed-node1] => {
"changed": true,
"checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7",
"dest": "/etc/containers/systemd/quadlet-demo.kube",
"gid": 0,
"group": "root",
"md5sum": "da53c88f92b68b0487aa209f795b6bb3",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 456,
"src": "/root/.ansible/tmp/ansible-tmp-1751933814.984563-20249-159053501176281/.source.kube",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62
Monday 07 July 2025 20:16:55 -0400 (0:00:00.756) 0:01:16.955 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75
Monday 07 July 2025 20:16:55 -0400 (0:00:00.062) 0:01:17.017 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_file is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87
Monday 07 July 2025 20:16:55 -0400 (0:00:00.058) 0:01:17.076 ***********
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
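Note: this daemon reload is what makes the freshly copied quadlet-demo.kube take effect. systemd re-runs its generators, and the podman quadlet generator turns the file into the generated unit that is started in the next task (its FragmentPath under /run/systemd/generator is visible in the status output below). A rough manual equivalent on the managed node would be:

  systemctl daemon-reload
  systemctl status quadlet-demo.service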
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115
Monday 07 July 2025 20:16:56 -0400 (0:00:00.769) 0:01:17.846 ***********
changed: [managed-node1] => {
"changed": true,
"name": "quadlet-demo.service",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "basic.target sysinit.target quadlet-demo-mysql.service -.mount network-online.target system.slice quadlet-demo-network.service systemd-journald.socket",
"AllowIsolate": "no",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target multi-user.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroupId": "0",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "quadlet-demo.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "18446744073709551615",
"IOReadOperations": "18446744073709551615",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "18446744073709551615",
"IOWriteOperations": "18446744073709551615",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13688",
"LimitNPROCSoft": "13688",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13688",
"LimitSIGPENDINGSoft": "13688",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "infinity",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "all",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "system.slice -.mount quadlet-demo-network.service sysinit.target quadlet-demo-mysql.service",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartKillSignal": "15",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo.kube",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "21900",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "infinity"
}
}
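Note: the status above shows how quadlet translated quadlet-demo.kube (the SourcePath) into the generated unit at /run/systemd/generator/quadlet-demo.service: the [Kube] directives became a single podman kube play invocation, with a matching podman kube down on stop. Extracted from the ExecStart and ExecStopPost values reported above:

  /usr/bin/podman kube play --replace --service-container=true \
      --network systemd-quadlet-demo \
      --configmap /etc/containers/systemd/envoy-proxy-configmap.yml \
      --publish 8000:8080 --publish 9000:9901 \
      /etc/containers/systemd/quadlet-demo.yml

  /usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml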
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131
Monday 07 July 2025 20:16:57 -0400 (0:00:01.374) 0:01:19.220 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_service_started is changed",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Cancel linger] ************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198
Monday 07 July 2025 20:16:58 -0400 (0:00:00.059) 0:01:19.279 ***********
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Handle credential files - absent] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204
Monday 07 July 2025 20:16:58 -0400 (0:00:00.051) 0:01:19.331 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213
Monday 07 July 2025 20:16:58 -0400 (0:00:00.049) 0:01:19.380 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [Check quadlet files] *****************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:96
Monday 07 July 2025 20:16:58 -0400 (0:00:00.144) 0:01:19.525 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"ls",
"-alrtF",
"/etc/containers/systemd"
],
"delta": "0:00:00.006550",
"end": "2025-07-07 20:16:58.738418",
"rc": 0,
"start": "2025-07-07 20:16:58.731868"
}
STDOUT:
total 44
-rw-r--r--. 1 root root 171 Jul 7 20:11 auth_test_1_quadlet.container
drwxr-xr-x. 10 root root 4096 Jul 7 20:14 ../
-rw-r--r--. 1 root root 151 Jul 7 20:15 nopull.container
-rw-r--r--. 1 root root 138 Jul 7 20:15 bogus.container
-rw-r--r--. 1 root root 74 Jul 7 20:16 quadlet-demo.network
-rw-r--r--. 1 root root 9 Jul 7 20:16 quadlet-demo-mysql.volume
-rw-r--r--. 1 root root 363 Jul 7 20:16 quadlet-demo-mysql.container
-rw-r--r--. 1 root root 2102 Jul 7 20:16 envoy-proxy-configmap.yml
-rw-r--r--. 1 root root 1605 Jul 7 20:16 quadlet-demo.yml
-rw-r--r--. 1 root root 456 Jul 7 20:16 quadlet-demo.kube
drwxr-xr-x. 2 root root 4096 Jul 7 20:16 ./
TASK [Check containers] ********************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:100
Monday 07 July 2025 20:16:58 -0400 (0:00:00.598) 0:01:20.124 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"-a"
],
"delta": "0:00:00.072978",
"end": "2025-07-07 20:16:59.511171",
"failed_when_result": false,
"rc": 0,
"start": "2025-07-07 20:16:59.438193"
}
STDOUT:
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
f33934011335 quay.io/linux-system-roles/mysql:5.6 mysqld 32 seconds ago Up 33 seconds (healthy) 3306/tcp quadlet-demo-mysql
5b34c5fc3265 2 seconds ago Up 2 seconds a96f3a51b8d1-service
fc833924e791 2 seconds ago Up 2 seconds 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp b57dd77843de-infra
8f4b73cb87b9 quay.io/linux-system-roles/wordpress:4.8-apache apache2-foregroun... 2 seconds ago Up 2 seconds 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 80/tcp quadlet-demo-wordpress
f5dcbbf00e55 quay.io/linux-system-roles/envoyproxy:v1.25.0 envoy -c /etc/env... 2 seconds ago Up 2 seconds 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 10000/tcp quadlet-demo-envoy
TASK [Check volumes] ***********************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:105
Monday 07 July 2025 20:16:59 -0400 (0:00:00.754) 0:01:20.878 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls"
],
"delta": "0:00:00.044753",
"end": "2025-07-07 20:16:59.947912",
"failed_when_result": false,
"rc": 0,
"start": "2025-07-07 20:16:59.903159"
}
STDOUT:
DRIVER VOLUME NAME
local systemd-quadlet-demo-mysql
local wp-pv-claim
local envoy-proxy-config
local envoy-certificates
TASK [Check pods] **************************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:110
Monday 07 July 2025 20:17:00 -0400 (0:00:00.417) 0:01:21.296 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"pod",
"ps",
"--ctr-ids",
"--ctr-names",
"--ctr-status"
],
"delta": "0:00:00.067539",
"end": "2025-07-07 20:17:00.479662",
"failed_when_result": false,
"rc": 0,
"start": "2025-07-07 20:17:00.412123"
}
STDOUT:
POD ID NAME STATUS CREATED INFRA ID IDS NAMES STATUS
b57dd77843de quadlet-demo Running 3 seconds ago fc833924e791 fc833924e791,8f4b73cb87b9,f5dcbbf00e55 b57dd77843de-infra,quadlet-demo-wordpress,quadlet-demo-envoy running,running,running
TASK [Check systemd] ***********************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:115
Monday 07 July 2025 20:17:00 -0400 (0:00:00.541) 0:01:21.837 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": "set -euo pipefail; systemctl list-units | grep quadlet",
"delta": "0:00:00.020948",
"end": "2025-07-07 20:17:01.005370",
"failed_when_result": false,
"rc": 0,
"start": "2025-07-07 20:17:00.984422"
}
STDOUT:
quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service
quadlet-demo-mysql.service loaded active running quadlet-demo-mysql.service
quadlet-demo-network.service loaded active exited quadlet-demo-network.service
quadlet-demo.service loaded active running quadlet-demo.service
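Note: the four units listed correspond one-to-one to the quadlet files shown in the /etc/containers/systemd listing above: quadlet-demo.network -> quadlet-demo-network.service, quadlet-demo-mysql.volume -> quadlet-demo-mysql-volume.service, quadlet-demo-mysql.container -> quadlet-demo-mysql.service, and quadlet-demo.kube -> quadlet-demo.service. To inspect any of the generated unit files directly on the managed node one could run, for example (not part of the test):

  systemctl cat quadlet-demo.service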
TASK [Check web] ***************************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121
Monday 07 July 2025 20:17:01 -0400 (0:00:00.524) 0:01:22.362 ***********
changed: [managed-node1] => {
"attempts": 1,
"changed": true,
"checksum_dest": null,
"checksum_src": "d1ac587ee4653b36ed40791b2bca2a83cf8cb157",
"dest": "/run/out",
"elapsed": 0,
"gid": 0,
"group": "root",
"md5sum": "95e8238992037c7b6b6decebba46e982",
"mode": "0600",
"owner": "root",
"secontext": "system_u:object_r:var_run_t:s0",
"size": 11666,
"src": "/root/.ansible/tmp/ansible-tmp-1751933821.1347373-20465-22063181325849/tmptfare9_n",
"state": "file",
"status_code": 200,
"uid": 0,
"url": "https://localhost:8000"
}
MSG:
OK (unknown bytes)
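Note: this step fetches https://localhost:8000 on the managed node and saves the body to /run/out. Port 8000 is the PublishPort=8000:8080 mapping from quadlet-demo.kube and is handled by the envoy container in the pod (wordpress itself listens on 80/tcp, per the podman ps output above). A rough manual equivalent would be the following sketch; the -k flag is an assumption here, since the test's envoy certificate is not issued by a trusted CA:

  curl -k -o /run/out https://localhost:8000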
TASK [Show web] ****************************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:132
Monday 07 July 2025 20:17:02 -0400 (0:00:01.617) 0:01:23.979 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"cat",
"/run/out"
],
"delta": "0:00:00.004157",
"end": "2025-07-07 20:17:03.169009",
"rc": 0,
"start": "2025-07-07 20:17:03.164852"
}
STDOUT:
WordPress › Installation
WordPress
TASK [Error] *******************************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:137
Monday 07 July 2025 20:17:03 -0400 (0:00:00.581) 0:01:24.561 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__web_status is failed",
"skip_reason": "Conditional result was False"
}
TASK [Check] *******************************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:148
Monday 07 July 2025 20:17:03 -0400 (0:00:00.051) 0:01:24.612 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"-a"
],
"delta": "0:00:00.040764",
"end": "2025-07-07 20:17:03.789449",
"rc": 0,
"start": "2025-07-07 20:17:03.748685"
}
STDOUT:
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
f33934011335 quay.io/linux-system-roles/mysql:5.6 mysqld 37 seconds ago Up 37 seconds (healthy) 3306/tcp quadlet-demo-mysql
5b34c5fc3265 6 seconds ago Up 6 seconds a96f3a51b8d1-service
fc833924e791 6 seconds ago Up 6 seconds 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp b57dd77843de-infra
8f4b73cb87b9 quay.io/linux-system-roles/wordpress:4.8-apache apache2-foregroun... 6 seconds ago Up 6 seconds 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 80/tcp quadlet-demo-wordpress
f5dcbbf00e55 quay.io/linux-system-roles/envoyproxy:v1.25.0 envoy -c /etc/env... 6 seconds ago Up 6 seconds 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 10000/tcp quadlet-demo-envoy
TASK [Check pods] **************************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:152
Monday 07 July 2025 20:17:03 -0400 (0:00:00.525) 0:01:25.137 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"pod",
"ps",
"--ctr-ids",
"--ctr-names",
"--ctr-status"
],
"delta": "0:00:00.061145",
"end": "2025-07-07 20:17:04.309025",
"failed_when_result": false,
"rc": 0,
"start": "2025-07-07 20:17:04.247880"
}
STDOUT:
POD ID NAME STATUS CREATED INFRA ID IDS NAMES STATUS
b57dd77843de quadlet-demo Running 7 seconds ago fc833924e791 fc833924e791,8f4b73cb87b9,f5dcbbf00e55 b57dd77843de-infra,quadlet-demo-wordpress,quadlet-demo-envoy running,running,running
TASK [Check systemd] ***********************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:157
Monday 07 July 2025 20:17:04 -0400 (0:00:00.532) 0:01:25.670 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": "set -euo pipefail; systemctl list-units --all | grep quadlet",
"delta": "0:00:00.021817",
"end": "2025-07-07 20:17:04.807850",
"failed_when_result": false,
"rc": 0,
"start": "2025-07-07 20:17:04.786033"
}
STDOUT:
auth_test_1_quadlet.service loaded inactive dead auth_test_1_quadlet.service
quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service
quadlet-demo-mysql.service loaded active running quadlet-demo-mysql.service
quadlet-demo-network.service loaded active exited quadlet-demo-network.service
quadlet-demo.service loaded active running quadlet-demo.service
TASK [LS] **********************************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:165
Monday 07 July 2025 20:17:04 -0400 (0:00:00.495) 0:01:26.166 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"ls",
"-alrtF",
"/etc/systemd/system"
],
"delta": "0:00:00.008899",
"end": "2025-07-07 20:17:05.310916",
"failed_when_result": false,
"rc": 0,
"start": "2025-07-07 20:17:05.302017"
}
STDOUT:
total 8
drwxr-xr-x. 2 root root 32 Jun 24 03:12 getty.target.wants/
lrwxrwxrwx. 1 root root 37 Jun 24 03:12 ctrl-alt-del.target -> /usr/lib/systemd/system/reboot.target
lrwxrwxrwx. 1 root root 43 Jun 24 03:12 dbus.service -> /usr/lib/systemd/system/dbus-broker.service
drwxr-xr-x. 4 root root 166 Jun 24 03:12 ../
drwxr-xr-x. 2 root root 182 Jun 24 03:12 sysinit.target.wants/
lrwxrwxrwx. 1 root root 57 Jun 24 03:13 dbus-org.freedesktop.nm-dispatcher.service -> /usr/lib/systemd/system/NetworkManager-dispatcher.service
drwxr-xr-x. 2 root root 48 Jun 24 03:13 network-online.target.wants/
drwxr-xr-x. 2 root root 56 Jun 24 03:13 timers.target.wants/
drwxr-xr-x. 2 root root 38 Jun 24 03:13 dev-virtio\x2dports-org.qemu.guest_agent.0.device.wants/
drwxr-xr-x. 2 root root 31 Jun 24 03:13 basic.target.wants/
lrwxrwxrwx. 1 root root 41 Jun 24 03:15 default.target -> /usr/lib/systemd/system/multi-user.target
drwxr-xr-x. 2 root root 70 Jun 24 03:27 sockets.target.wants/
drwxr-xr-x. 2 root root 31 Jun 24 03:27 remote-fs.target.wants/
drwxr-xr-x. 2 root root 119 Jun 24 03:28 cloud-init.target.wants/
drwxr-xr-x. 2 root root 4096 Jul 7 20:15 multi-user.target.wants/
lrwxrwxrwx. 1 root root 41 Jul 7 20:15 dbus-org.fedoraproject.FirewallD1.service -> /usr/lib/systemd/system/firewalld.service
drwxr-xr-x. 12 root root 4096 Jul 7 20:15 ./
TASK [Cleanup] *****************************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:172
Monday 07 July 2025 20:17:05 -0400 (0:00:00.522) 0:01:26.688 ***********
included: fedora.linux_system_roles.podman for managed-node1
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Monday 07 July 2025 20:17:05 -0400 (0:00:00.099) 0:01:26.788 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Monday 07 July 2025 20:17:05 -0400 (0:00:00.066) 0:01:26.854 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Monday 07 July 2025 20:17:05 -0400 (0:00:00.044) 0:01:26.899 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_is_ostree is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Monday 07 July 2025 20:17:05 -0400 (0:00:00.032) 0:01:26.932 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_is_ostree is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Monday 07 July 2025 20:17:05 -0400 (0:00:00.082) 0:01:27.014 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_is_transactional is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Monday 07 July 2025 20:17:05 -0400 (0:00:00.038) 0:01:27.052 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_is_transactional is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Monday 07 July 2025 20:17:05 -0400 (0:00:00.045) 0:01:27.097 ***********
ok: [managed-node1] => (item=RedHat.yml) => {
"ansible_facts": {
"__podman_packages": [
"podman",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "RedHat.yml"
}
skipping: [managed-node1] => (item=CentOS.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "CentOS.yml",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=CentOS_9.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "CentOS_9.yml",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=CentOS_9.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "CentOS_9.yml",
"skip_reason": "Conditional result was False"
}
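The loop above tries a series of distribution-specific vars files and loads only the candidates that actually exist (the "is file" condition). A rough sketch of the pattern; the file list and variable names are illustrative rather than the role's exact code:
    - name: Set platform/version specific variables
      ansible.builtin.include_vars:
        file: "{{ __vars_file }}"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"   # role_path resolves inside the role
      loop:
        - RedHat.yml
        - CentOS.yml
        - CentOS_9.yml
      when: __vars_file is file          # skip candidates that are not present, as seen above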
TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Monday 07 July 2025 20:17:05 -0400 (0:00:00.119) 0:01:27.217 ***********
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Monday 07 July 2025 20:17:07 -0400 (0:00:01.159) 0:01:28.377 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_use_copr | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Monday 07 July 2025 20:17:07 -0400 (0:00:00.033) 0:01:28.410 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0",
"skip_reason": "Conditional result was False"
}
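The two tasks above first gather the installed-package inventory and then install the role's package list only when something from __podman_packages is missing; both were no-ops here. A hedged sketch of that pattern, using the package list and condition shown in the output:
    - name: Gather the package facts
      ansible.builtin.package_facts:
        manager: auto

    - name: Ensure required packages are installed
      ansible.builtin.package:
        name: "{{ __podman_packages }}"  # ["podman", "shadow-utils-subid"] per the vars loaded above
        state: present
      when: (__podman_packages | difference(ansible_facts.packages)) | list | length > 0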
TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Monday 07 July 2025 20:17:07 -0400 (0:00:00.040) 0:01:28.451 ***********
skipping: [managed-node1] => {
"false_condition": "__podman_is_transactional | d(false)"
}
TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Monday 07 July 2025 20:17:07 -0400 (0:00:00.040) 0:01:28.491 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Monday 07 July 2025 20:17:07 -0400 (0:00:00.050) 0:01:28.542 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Monday 07 July 2025 20:17:07 -0400 (0:00:00.052) 0:01:28.595 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"--version"
],
"delta": "0:00:00.026725",
"end": "2025-07-07 20:17:07.646492",
"rc": 0,
"start": "2025-07-07 20:17:07.619767"
}
STDOUT:
podman version 5.5.1
TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Monday 07 July 2025 20:17:07 -0400 (0:00:00.396) 0:01:28.991 ***********
ok: [managed-node1] => {
"ansible_facts": {
"podman_version": "5.5.1"
},
"changed": false
}
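Capturing and parsing the version is a simple register-plus-set_fact pattern; the parsing below assumes the "podman version 5.5.1" output format shown above and is a sketch, not the role's exact code:
    - name: Get podman version
      ansible.builtin.command: podman --version
      register: __podman_version_output
      changed_when: false

    - name: Set podman version
      ansible.builtin.set_fact:
        podman_version: "{{ __podman_version_output.stdout.split() | last }}"   # yields "5.5.1"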
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Monday 07 July 2025 20:17:07 -0400 (0:00:00.037) 0:01:29.028 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_version is version(\"4.2\", \"<\")",
"skip_reason": "Conditional result was False"
}
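This gate skips because 5.5.1 satisfies the minimum; written as a standalone task, the version comparison might look like the following (message text is illustrative):
    - name: Podman package version must be 4.2 or later
      ansible.builtin.fail:
        msg: "podman {{ podman_version }} found, but 4.2 or later is required"
      when: podman_version is version("4.2", "<")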
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Monday 07 July 2025 20:17:07 -0400 (0:00:00.032) 0:01:29.061 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_version is version(\"4.4\", \"<\")",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Monday 07 July 2025 20:17:07 -0400 (0:00:00.040) 0:01:29.102 ***********
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}
MSG:
end_host conditional evaluated to false, continuing execution for managed-node1
TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Monday 07 July 2025 20:17:07 -0400 (0:00:00.041) 0:01:29.143 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Monday 07 July 2025 20:17:07 -0400 (0:00:00.073) 0:01:29.217 ***********
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}
MSG:
end_host conditional evaluated to false, continuing execution for managed-node1
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Monday 07 July 2025 20:17:08 -0400 (0:00:00.165) 0:01:29.383 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:17:08 -0400 (0:00:00.112) 0:01:29.495 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:17:08 -0400 (0:00:00.046) 0:01:29.542 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:17:08 -0400 (0:00:00.046) 0:01:29.589 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:17:08 -0400 (0:00:00.053) 0:01:29.642 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1751933454.6873221,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "8bedde2dbca15219e1a3b95a68a8c0d26a92ba62",
"ctime": 1751933427.4892416,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 665568,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1748273472.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15496,
"uid": 0,
"version": "4278445899",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
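The stat result above is what the later __podman_stat_getsubids.stat.exists conditions consume; a minimal sketch of the lookup:
    - name: See if getsubids exists
      ansible.builtin.stat:
        path: /usr/bin/getsubids
      register: __podman_stat_getsubids   # .stat.exists selects getsubids vs. subuid/subgid file parsing below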
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:17:08 -0400 (0:00:00.372) 0:01:30.015 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:17:08 -0400 (0:00:00.035) 0:01:30.051 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:17:08 -0400 (0:00:00.035) 0:01:30.086 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:17:08 -0400 (0:00:00.036) 0:01:30.123 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:17:08 -0400 (0:00:00.039) 0:01:30.162 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:17:08 -0400 (0:00:00.053) 0:01:30.215 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:17:08 -0400 (0:00:00.050) 0:01:30.266 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:17:09 -0400 (0:00:00.040) 0:01:30.307 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Monday 07 July 2025 20:17:09 -0400 (0:00:00.040) 0:01:30.348 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf",
"__podman_parent_mode": "0755",
"__podman_parent_path": "/etc/containers",
"__podman_policy_json_file": "/etc/containers/policy.json",
"__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf",
"__podman_storage_conf_file": "/etc/containers/storage.conf"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126
Monday 07 July 2025 20:17:09 -0400 (0:00:00.055) 0:01:30.403 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5
Monday 07 July 2025 20:17:09 -0400 (0:00:00.062) 0:01:30.466 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_containers_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update container config file] *********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13
Monday 07 July 2025 20:17:09 -0400 (0:00:00.075) 0:01:30.542 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_containers_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] *************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129
Monday 07 July 2025 20:17:09 -0400 (0:00:00.033) 0:01:30.575 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5
Monday 07 July 2025 20:17:09 -0400 (0:00:00.068) 0:01:30.644 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_registries_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update registries config file] ********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13
Monday 07 July 2025 20:17:09 -0400 (0:00:00.055) 0:01:30.700 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_registries_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle storage.conf] ******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132
Monday 07 July 2025 20:17:09 -0400 (0:00:00.041) 0:01:30.741 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7
Monday 07 July 2025 20:17:09 -0400 (0:00:00.076) 0:01:30.817 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_storage_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15
Monday 07 July 2025 20:17:09 -0400 (0:00:00.040) 0:01:30.857 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_storage_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135
Monday 07 July 2025 20:17:09 -0400 (0:00:00.037) 0:01:30.895 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8
Monday 07 July 2025 20:17:09 -0400 (0:00:00.066) 0:01:30.961 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16
Monday 07 July 2025 20:17:09 -0400 (0:00:00.034) 0:01:30.996 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21
Monday 07 July 2025 20:17:09 -0400 (0:00:00.032) 0:01:31.029 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27
Monday 07 July 2025 20:17:09 -0400 (0:00:00.038) 0:01:31.068 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [Manage firewall for specified ports] *************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141
Monday 07 July 2025 20:17:09 -0400 (0:00:00.054) 0:01:31.122 ***********
included: fedora.linux_system_roles.firewall for managed-node1
TASK [fedora.linux_system_roles.firewall : Setup firewalld] ********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2
Monday 07 July 2025 20:17:10 -0400 (0:00:00.188) 0:01:31.311 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node1
TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2
Monday 07 July 2025 20:17:10 -0400 (0:00:00.166) 0:01:31.477 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Check if system is ostree] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10
Monday 07 July 2025 20:17:10 -0400 (0:00:00.067) 0:01:31.544 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_ostree is not defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15
Monday 07 July 2025 20:17:10 -0400 (0:00:00.056) 0:01:31.600 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_ostree is not defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22
Monday 07 July 2025 20:17:10 -0400 (0:00:00.053) 0:01:31.654 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __firewall_is_transactional is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27
Monday 07 July 2025 20:17:10 -0400 (0:00:00.055) 0:01:31.709 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __firewall_is_transactional is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Run systemctl] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:34
Monday 07 July 2025 20:17:10 -0400 (0:00:00.058) 0:01:31.768 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_booted is not defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Require installed systemd] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:41
Monday 07 July 2025 20:17:10 -0400 (0:00:00.055) 0:01:31.823 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_booted is not defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Set flag to indicate that systemd runtime operations are available] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:46
Monday 07 July 2025 20:17:10 -0400 (0:00:00.052) 0:01:31.876 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_booted is not defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Install firewalld] ******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:51
Monday 07 July 2025 20:17:10 -0400 (0:00:00.038) 0:01:31.915 ***********
ok: [managed-node1] => {
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:63
Monday 07 July 2025 20:17:13 -0400 (0:00:03.192) 0:01:35.107 ***********
skipping: [managed-node1] => {
"false_condition": "__firewall_is_transactional | d(false)"
}
TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:68
Monday 07 July 2025 20:17:13 -0400 (0:00:00.055) 0:01:35.163 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:73
Monday 07 July 2025 20:17:13 -0400 (0:00:00.054) 0:01:35.218 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Check which conflicting services are enabled] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5
Monday 07 July 2025 20:17:13 -0400 (0:00:00.055) 0:01:35.274 ***********
skipping: [managed-node1] => (item=nftables) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "nftables",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=iptables) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "iptables",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=ufw) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "ufw",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:14
Monday 07 July 2025 20:17:14 -0400 (0:00:00.077) 0:01:35.352 ***********
skipping: [managed-node1] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'nftables', 'ansible_loop_var': 'item'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "nftables",
"skip_reason": "Conditional result was False",
"skipped": true
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'iptables', 'ansible_loop_var': 'item'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "iptables",
"skip_reason": "Conditional result was False",
"skipped": true
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'ufw', 'ansible_loop_var': 'item'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "ufw",
"skip_reason": "Conditional result was False",
"skipped": true
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:24
Monday 07 July 2025 20:17:14 -0400 (0:00:00.067) 0:01:35.420 ***********
ok: [managed-node1] => {
"changed": false,
"name": "firewalld",
"status": {
"AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0",
"ActiveEnterTimestamp": "Mon 2025-07-07 20:15:58 EDT",
"ActiveEnterTimestampMonotonic": "786637990",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "polkit.service basic.target sysinit.target dbus-broker.service system.slice dbus.socket",
"AllowIsolate": "no",
"AssertResult": "yes",
"AssertTimestamp": "Mon 2025-07-07 20:15:58 EDT",
"AssertTimestampMonotonic": "786420496",
"Before": "shutdown.target multi-user.target network-pre.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "617654000",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Mon 2025-07-07 20:15:58 EDT",
"ConditionTimestampMonotonic": "786420493",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "iptables.service shutdown.target ip6tables.service ebtables.service ipset.service",
"ControlGroup": "/system.slice/firewalld.service",
"ControlGroupId": "82013",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DevicePolicy": "auto",
"Documentation": "\"man:firewalld(1)\"",
"DynamicUser": "no",
"EffectiveCPUs": "0-1",
"EffectiveMemoryNodes": "0",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "75166",
"ExecMainStartTimestamp": "Mon 2025-07-07 20:15:58 EDT",
"ExecMainStartTimestampMonotonic": "786429452",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FinalKillSignal": "9",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "18446744073709551615",
"IOReadOperations": "18446744073709551615",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "18446744073709551615",
"IOWriteOperations": "18446744073709551615",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Mon 2025-07-07 20:15:58 EDT",
"InactiveExitTimestampMonotonic": "786429796",
"InvocationID": "1900126e0451434090943b217e23bf08",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13688",
"LimitNPROCSoft": "13688",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13688",
"LimitSIGPENDINGSoft": "13688",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "75166",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "infinity",
"MemoryCurrent": "32342016",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "sysinit.target system.slice dbus.socket",
"Restart": "no",
"RestartKillSignal": "15",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestamp": "Mon 2025-07-07 20:16:56 EDT",
"StateChangeTimestampMonotonic": "844826550",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "2",
"TasksMax": "21900",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-pre.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:30
Monday 07 July 2025 20:17:14 -0400 (0:00:00.572) 0:01:35.992 ***********
ok: [managed-node1] => {
"changed": false,
"enabled": true,
"name": "firewalld",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0",
"ActiveEnterTimestamp": "Mon 2025-07-07 20:15:58 EDT",
"ActiveEnterTimestampMonotonic": "786637990",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "polkit.service basic.target sysinit.target dbus-broker.service system.slice dbus.socket",
"AllowIsolate": "no",
"AssertResult": "yes",
"AssertTimestamp": "Mon 2025-07-07 20:15:58 EDT",
"AssertTimestampMonotonic": "786420496",
"Before": "shutdown.target multi-user.target network-pre.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "617654000",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Mon 2025-07-07 20:15:58 EDT",
"ConditionTimestampMonotonic": "786420493",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "iptables.service shutdown.target ip6tables.service ebtables.service ipset.service",
"ControlGroup": "/system.slice/firewalld.service",
"ControlGroupId": "82013",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DevicePolicy": "auto",
"Documentation": "\"man:firewalld(1)\"",
"DynamicUser": "no",
"EffectiveCPUs": "0-1",
"EffectiveMemoryNodes": "0",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "75166",
"ExecMainStartTimestamp": "Mon 2025-07-07 20:15:58 EDT",
"ExecMainStartTimestampMonotonic": "786429452",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FinalKillSignal": "9",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "18446744073709551615",
"IOReadOperations": "18446744073709551615",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "18446744073709551615",
"IOWriteOperations": "18446744073709551615",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Mon 2025-07-07 20:15:58 EDT",
"InactiveExitTimestampMonotonic": "786429796",
"InvocationID": "1900126e0451434090943b217e23bf08",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13688",
"LimitNPROCSoft": "13688",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13688",
"LimitSIGPENDINGSoft": "13688",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "75166",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "infinity",
"MemoryCurrent": "32342016",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "sysinit.target system.slice dbus.socket",
"Restart": "no",
"RestartKillSignal": "15",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestamp": "Mon 2025-07-07 20:16:56 EDT",
"StateChangeTimestampMonotonic": "844826550",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "2",
"TasksMax": "21900",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-pre.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
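The task above is an idempotent enable-and-start of firewalld via the systemd module (the service is already active, so nothing changes). A minimal sketch of the equivalent standalone task:
    - name: Enable and start firewalld service
      ansible.builtin.systemd:
        name: firewalld
        enabled: true
        state: started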
TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:36
Monday 07 July 2025 20:17:15 -0400 (0:00:00.573) 0:01:36.566 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__firewall_previous_replaced": false,
"__firewall_python_cmd": "/usr/bin/python3.9",
"__firewall_report_changed": true
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:45
Monday 07 July 2025 20:17:15 -0400 (0:00:00.070) 0:01:36.636 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:58
Monday 07 July 2025 20:17:15 -0400 (0:00:00.056) 0:01:36.693 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Configure firewall] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:74
Monday 07 July 2025 20:17:15 -0400 (0:00:00.115) 0:01:36.808 ***********
ok: [managed-node1] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
"__firewall_changed": false,
"ansible_loop_var": "item",
"changed": false,
"item": {
"port": "8000/tcp",
"state": "enabled"
}
}
ok: [managed-node1] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
"__firewall_changed": false,
"ansible_loop_var": "item",
"changed": false,
"item": {
"port": "9000/tcp",
"state": "enabled"
}
}
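The two loop items correspond to opening 8000/tcp and 9000/tcp for the quadlet demo. Invoking the firewall role directly with the same input would look roughly like this; a sketch of the interface, not the podman role's exact wiring:
    - name: Manage firewall for specified ports
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.firewall
      vars:
        firewall:
          - port: 8000/tcp
            state: enabled
          - port: 9000/tcp
            state: enabled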
TASK [fedora.linux_system_roles.firewall : Gather firewall config information] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:126
Monday 07 July 2025 20:17:16 -0400 (0:00:01.032) 0:01:37.841 ***********
skipping: [managed-node1] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall | length == 1",
"item": {
"port": "8000/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall | length == 1",
"item": {
"port": "9000/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:137
Monday 07 July 2025 20:17:16 -0400 (0:00:00.070) 0:01:37.911 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall | length == 1",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:146
Monday 07 July 2025 20:17:16 -0400 (0:00:00.062) 0:01:37.973 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall == None or firewall | length == 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:152
Monday 07 July 2025 20:17:16 -0400 (0:00:00.059) 0:01:38.033 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall == None or firewall | length == 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:161
Monday 07 July 2025 20:17:16 -0400 (0:00:00.059) 0:01:38.092 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Calculate what has changed] *********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:172
Monday 07 July 2025 20:17:16 -0400 (0:00:00.054) 0:01:38.147 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Show diffs] *************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:178
Monday 07 July 2025 20:17:16 -0400 (0:00:00.056) 0:01:38.203 ***********
skipping: [managed-node1] => {
"false_condition": "__firewall_previous_replaced | bool"
}
TASK [Manage selinux for specified ports] **************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148
Monday 07 July 2025 20:17:17 -0400 (0:00:00.086) 0:01:38.289 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_selinux_ports | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155
Monday 07 July 2025 20:17:17 -0400 (0:00:00.058) 0:01:38.348 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_cancel_user_linger": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159
Monday 07 July 2025 20:17:17 -0400 (0:00:00.062) 0:01:38.411 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168
Monday 07 July 2025 20:17:17 -0400 (0:00:00.050) 0:01:38.461 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177
Monday 07 July 2025 20:17:17 -0400 (0:00:00.052) 0:01:38.514 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Monday 07 July 2025 20:17:17 -0400 (0:00:00.167) 0:01:38.681 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Monday 07 July 2025 20:17:17 -0400 (0:00:00.059) 0:01:38.741 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:17:17 -0400 (0:00:00.099) 0:01:38.841 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:17:17 -0400 (0:00:00.102) 0:01:38.943 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:17:17 -0400 (0:00:00.046) 0:01:38.990 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:17:17 -0400 (0:00:00.050) 0:01:39.040 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:17:17 -0400 (0:00:00.033) 0:01:39.074 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:17:17 -0400 (0:00:00.033) 0:01:39.107 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:17:17 -0400 (0:00:00.033) 0:01:39.140 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:17:17 -0400 (0:00:00.032) 0:01:39.172 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:17:17 -0400 (0:00:00.037) 0:01:39.210 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:17:17 -0400 (0:00:00.055) 0:01:39.265 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:17:18 -0400 (0:00:00.045) 0:01:39.311 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:17:18 -0400 (0:00:00.035) 0:01:39.346 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14
Monday 07 July 2025 20:17:18 -0400 (0:00:00.040) 0:01:39.387 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20
Monday 07 July 2025 20:17:18 -0400 (0:00:00.059) 0:01:39.446 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:17:18 -0400 (0:00:00.067) 0:01:39.513 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:17:18 -0400 (0:00:00.032) 0:01:39.546 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:17:18 -0400 (0:00:00.033) 0:01:39.579 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25
Monday 07 July 2025 20:17:18 -0400 (0:00:00.032) 0:01:39.612 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
Monday 07 July 2025 20:17:18 -0400 (0:00:00.032) 0:01:39.644 ***********
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Monday 07 July 2025 20:17:18 -0400 (0:00:00.413) 0:01:40.058 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Monday 07 July 2025 20:17:18 -0400 (0:00:00.084) 0:01:40.142 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:17:18 -0400 (0:00:00.061) 0:01:40.203 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:17:18 -0400 (0:00:00.037) 0:01:40.241 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:17:18 -0400 (0:00:00.037) 0:01:40.278 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:17:19 -0400 (0:00:00.047) 0:01:40.325 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:17:19 -0400 (0:00:00.034) 0:01:40.359 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:17:19 -0400 (0:00:00.034) 0:01:40.394 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:17:19 -0400 (0:00:00.039) 0:01:40.434 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:17:19 -0400 (0:00:00.034) 0:01:40.469 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:17:19 -0400 (0:00:00.034) 0:01:40.504 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:17:19 -0400 (0:00:00.035) 0:01:40.539 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:17:19 -0400 (0:00:00.035) 0:01:40.575 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:17:19 -0400 (0:00:00.034) 0:01:40.610 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14
Monday 07 July 2025 20:17:19 -0400 (0:00:00.034) 0:01:40.644 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20
Monday 07 July 2025 20:17:19 -0400 (0:00:00.042) 0:01:40.687 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:17:19 -0400 (0:00:00.062) 0:01:40.750 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:17:19 -0400 (0:00:00.033) 0:01:40.784 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:17:19 -0400 (0:00:00.032) 0:01:40.816 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25
Monday 07 July 2025 20:17:19 -0400 (0:00:00.083) 0:01:40.900 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
Monday 07 July 2025 20:17:19 -0400 (0:00:00.032) 0:01:40.932 ***********
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Monday 07 July 2025 20:17:20 -0400 (0:00:00.411) 0:01:41.344 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Monday 07 July 2025 20:17:20 -0400 (0:00:00.038) 0:01:41.382 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:17:20 -0400 (0:00:00.066) 0:01:41.448 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:17:20 -0400 (0:00:00.039) 0:01:41.487 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:17:20 -0400 (0:00:00.036) 0:01:41.524 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:17:20 -0400 (0:00:00.046) 0:01:41.571 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:17:20 -0400 (0:00:00.034) 0:01:41.605 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:17:20 -0400 (0:00:00.035) 0:01:41.641 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:17:20 -0400 (0:00:00.034) 0:01:41.675 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:17:20 -0400 (0:00:00.034) 0:01:41.709 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:17:20 -0400 (0:00:00.034) 0:01:41.744 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:17:20 -0400 (0:00:00.035) 0:01:41.779 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:17:20 -0400 (0:00:00.033) 0:01:41.813 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:17:20 -0400 (0:00:00.034) 0:01:41.847 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14
Monday 07 July 2025 20:17:20 -0400 (0:00:00.034) 0:01:41.882 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20
Monday 07 July 2025 20:17:20 -0400 (0:00:00.039) 0:01:41.922 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:17:20 -0400 (0:00:00.109) 0:01:42.032 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:17:20 -0400 (0:00:00.033) 0:01:42.065 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:17:20 -0400 (0:00:00.031) 0:01:42.097 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25
Monday 07 July 2025 20:17:20 -0400 (0:00:00.032) 0:01:42.130 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
Monday 07 July 2025 20:17:20 -0400 (0:00:00.031) 0:01:42.162 ***********
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184
Monday 07 July 2025 20:17:21 -0400 (0:00:00.406) 0:01:42.568 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191
Monday 07 July 2025 20:17:21 -0400 (0:00:00.031) 0:01:42.599 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 07 July 2025 20:17:21 -0400 (0:00:00.156) 0:01:42.755 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo.kube",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml",
"__podman_quadlet_template_src": ""
},
"changed": false
}
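For readability, the escaped __podman_quadlet_str fact above appears to decode to the following quadlet-demo.kube unit. This is a reconstruction of the string shown in the task result (line breaks correspond to the embedded \n sequences), not an independently verified file on disk:
[Install]
WantedBy=default.target

[Unit]
Requires=quadlet-demo-mysql.service
After=quadlet-demo-mysql.service

[Kube]
# Point to the yaml file in the same directory
Yaml=quadlet-demo.yml
# Use the quadlet-demo network
Network=quadlet-demo.network
# Publish the envoy proxy data port
PublishPort=8000:8080
# Publish the envoy proxy admin port
PublishPort=9000:9901
# Use the envoy proxy config map in the same directory
ConfigMap=envoy-proxy-configmap.yml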
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 07 July 2025 20:17:21 -0400 (0:00:00.045) 0:01:42.800 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "absent",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 07 July 2025 20:17:21 -0400 (0:00:00.042) 0:01:42.843 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 07 July 2025 20:17:21 -0400 (0:00:00.033) 0:01:42.877 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "kube",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 07 July 2025 20:17:21 -0400 (0:00:00.049) 0:01:42.926 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:17:21 -0400 (0:00:00.061) 0:01:42.988 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:17:21 -0400 (0:00:00.037) 0:01:43.025 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:17:21 -0400 (0:00:00.034) 0:01:43.060 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:17:21 -0400 (0:00:00.088) 0:01:43.149 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1751933454.6873221,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "8bedde2dbca15219e1a3b95a68a8c0d26a92ba62",
"ctime": 1751933427.4892416,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 665568,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1748273472.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15496,
"uid": 0,
"version": "4278445899",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:17:22 -0400 (0:00:00.372) 0:01:43.522 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:17:22 -0400 (0:00:00.036) 0:01:43.558 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:17:22 -0400 (0:00:00.035) 0:01:43.593 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:17:22 -0400 (0:00:00.035) 0:01:43.628 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:17:22 -0400 (0:00:00.033) 0:01:43.662 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:17:22 -0400 (0:00:00.035) 0:01:43.698 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:17:22 -0400 (0:00:00.034) 0:01:43.732 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:17:22 -0400 (0:00:00.035) 0:01:43.768 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 07 July 2025 20:17:22 -0400 (0:00:00.033) 0:01:43.801 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": [
"quadlet-demo.yml"
],
"__podman_service_name": "quadlet-demo.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 07 July 2025 20:17:22 -0400 (0:00:00.058) 0:01:43.859 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 07 July 2025 20:17:22 -0400 (0:00:00.038) 0:01:43.898 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88
Monday 07 July 2025 20:17:22 -0400 (0:00:00.031) 0:01:43.929 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106
Monday 07 July 2025 20:17:22 -0400 (0:00:00.078) 0:01:44.007 ***********
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113
Monday 07 July 2025 20:17:22 -0400 (0:00:00.039) 0:01:44.047 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Monday 07 July 2025 20:17:22 -0400 (0:00:00.081) 0:01:44.129 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Monday 07 July 2025 20:17:22 -0400 (0:00:00.032) 0:01:44.162 ***********
changed: [managed-node1] => {
"changed": true,
"enabled": false,
"failed_when_result": false,
"name": "quadlet-demo.service",
"state": "stopped",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestamp": "Mon 2025-07-07 20:16:57 EDT",
"ActiveEnterTimestampMonotonic": "846162345",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "basic.target sysinit.target quadlet-demo-mysql.service -.mount network-online.target system.slice quadlet-demo-network.service systemd-journald.socket",
"AllowIsolate": "no",
"AssertResult": "yes",
"AssertTimestamp": "Mon 2025-07-07 20:16:57 EDT",
"AssertTimestampMonotonic": "845474529",
"Before": "shutdown.target multi-user.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "318221000",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Mon 2025-07-07 20:16:57 EDT",
"ConditionTimestampMonotonic": "845474524",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroup": "/system.slice/quadlet-demo.service",
"ControlGroupId": "83275",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "quadlet-demo.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveCPUs": "0-1",
"EffectiveMemoryNodes": "0",
"Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "83120",
"ExecMainStartTimestamp": "Mon 2025-07-07 20:16:57 EDT",
"ExecMainStartTimestampMonotonic": "846162311",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[Mon 2025-07-07 20:16:57 EDT] ; stop_time=[n/a] ; pid=83099 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[Mon 2025-07-07 20:16:57 EDT] ; stop_time=[n/a] ; pid=83099 ; code=(null) ; status=0/0 }",
"ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "18446744073709551615",
"IOReadOperations": "18446744073709551615",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "18446744073709551615",
"IOWriteOperations": "18446744073709551615",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Mon 2025-07-07 20:16:57 EDT",
"InactiveExitTimestampMonotonic": "845481469",
"InvocationID": "c56efec00de446c994852683541d7d95",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13688",
"LimitNPROCSoft": "13688",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13688",
"LimitSIGPENDINGSoft": "13688",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "83120",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "infinity",
"MemoryCurrent": "3194880",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "all",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "system.slice -.mount quadlet-demo-network.service sysinit.target quadlet-demo-mysql.service",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartKillSignal": "15",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo.kube",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestamp": "Mon 2025-07-07 20:16:57 EDT",
"StateChangeTimestampMonotonic": "846162345",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "4",
"TasksMax": "21900",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
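For reference, the ExecStart field in the service status above corresponds to the following podman invocation, copied out of the argv[] shown there for readability (this is what the generated quadlet-demo.service ran, as recorded by systemd; it is not a command added by the test):
/usr/bin/podman kube play --replace --service-container=true \
    --network systemd-quadlet-demo \
    --configmap /etc/containers/systemd/envoy-proxy-configmap.yml \
    --publish 8000:8080 --publish 9000:9901 \
    /etc/containers/systemd/quadlet-demo.yml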
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:34
Monday 07 July 2025 20:17:25 -0400 (0:00:02.225) 0:01:46.387 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1751933816.344481,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 8,
"charset": "us-ascii",
"checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7",
"ctime": 1751933815.5864782,
"dev": 51713,
"device_type": 0,
"executable": false,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 645923365,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "text/plain",
"mode": "0644",
"mtime": 1751933815.3084774,
"nlink": 1,
"path": "/etc/containers/systemd/quadlet-demo.kube",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 456,
"uid": 0,
"version": "78563733",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:39
Monday 07 July 2025 20:17:25 -0400 (0:00:00.379) 0:01:46.766 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Monday 07 July 2025 20:17:25 -0400 (0:00:00.103) 0:01:46.870 ***********
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Monday 07 July 2025 20:17:25 -0400 (0:00:00.350) 0:01:47.220 ***********
fatal: [managed-node1]: FAILED! => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result"
}
TASK [Debug] *******************************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:207
Monday 07 July 2025 20:17:25 -0400 (0:00:00.039) 0:01:47.260 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": "exec 1>&2\nset -x\nset -o pipefail\nsystemctl list-units --plain -l --all | grep quadlet || :\nsystemctl list-unit-files --all | grep quadlet || :\nsystemctl list-units --plain --failed -l --all | grep quadlet || :\n",
"delta": "0:00:00.399925",
"end": "2025-07-07 20:17:26.666914",
"rc": 0,
"start": "2025-07-07 20:17:26.266989"
}
STDERR:
+ set -o pipefail
+ systemctl list-units --plain -l --all
+ grep quadlet
auth_test_1_quadlet.service loaded inactive dead auth_test_1_quadlet.service
quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service
quadlet-demo-mysql.service loaded active running quadlet-demo-mysql.service
quadlet-demo-network.service loaded active exited quadlet-demo-network.service
quadlet-demo.service loaded inactive dead quadlet-demo.service
+ systemctl list-unit-files --all
+ grep quadlet
auth_test_1_quadlet.service generated -
quadlet-demo-mysql-volume.service generated -
quadlet-demo-mysql.service generated -
quadlet-demo-network.service generated -
quadlet-demo.service generated -
+ systemctl list-units --plain --failed -l --all
+ grep quadlet
+ :
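For readability, the \n-escaped "cmd" string in the Debug result above appears to decode to the shell snippet below; it is a reconstruction of the command the test ran (whose trace is the STDERR shown), not a new command:
exec 1>&2
set -x
set -o pipefail
systemctl list-units --plain -l --all | grep quadlet || :
systemctl list-unit-files --all | grep quadlet || :
systemctl list-units --plain --failed -l --all | grep quadlet || :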
TASK [Get journald] ************************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:217
Monday 07 July 2025 20:17:26 -0400 (0:00:00.751) 0:01:48.011 ***********
fatal: [managed-node1]: FAILED! => {
"changed": false,
"cmd": [
"journalctl",
"-ex"
],
"delta": "0:00:00.039949",
"end": "2025-07-07 20:17:27.059651",
"failed_when_result": true,
"rc": 0,
"start": "2025-07-07 20:17:27.019702"
}
STDOUT:
Jul 07 20:12:50 managed-node1 podman[35538]: 2025-07-07 20:12:50.361351804 -0400 EDT m=+0.036190231 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:50 managed-node1 podman[35538]: 2025-07-07 20:12:50.374459829 -0400 EDT m=+0.049298407 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)
Jul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 4145.
Jul 07 20:12:50 managed-node1 podman[35538]: 2025-07-07 20:12:50.422744257 -0400 EDT m=+0.097582608 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)
Jul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35595]: This container is intended for podman CI testing
Jul 07 20:12:50 managed-node1 podman[35538]: 2025-07-07 20:12:50.42588667 -0400 EDT m=+0.100725056 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:50 managed-node1 conmon[35595]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events
Jul 07 20:12:50 managed-node1 podman[35606]: 2025-07-07 20:12:50.477186688 -0400 EDT m=+0.035333022 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:50 managed-node1 podman[35606]: 2025-07-07 20:12:50.491409753 -0400 EDT m=+0.049556363 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)
Jul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 4150.
Jul 07 20:12:50 managed-node1 podman[35606]: 2025-07-07 20:12:50.552263881 -0400 EDT m=+0.110410323 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35663]: This container is intended for podman CI testing
Jul 07 20:12:50 managed-node1 conmon[35663]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events
Jul 07 20:12:50 managed-node1 podman[35606]: 2025-07-07 20:12:50.557866714 -0400 EDT m=+0.116012971 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)
Jul 07 20:12:50 managed-node1 podman[35686]: 2025-07-07 20:12:50.620063202 -0400 EDT m=+0.048519999 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)
Jul 07 20:12:50 managed-node1 podman[35686]: 2025-07-07 20:12:50.633249443 -0400 EDT m=+0.061706156 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)
Jul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 4155.
Jul 07 20:12:50 managed-node1 podman[35686]: 2025-07-07 20:12:50.692351489 -0400 EDT m=+0.120808216 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35704]: This container is intended for podman CI testing
Jul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:50 managed-node1 podman[35686]: 2025-07-07 20:12:50.69827612 -0400 EDT m=+0.126732782 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:50 managed-node1 python3.9[35698]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:12:50 managed-node1 podman[35708]: 2025-07-07 20:12:50.759561182 -0400 EDT m=+0.046849024 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:50 managed-node1 podman[35708]: 2025-07-07 20:12:50.77735646 -0400 EDT m=+0.064644058 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 4160.
Jul 07 20:12:50 managed-node1 podman[35708]: 2025-07-07 20:12:50.833194605 -0400 EDT m=+0.120482053 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)
Jul 07 20:12:50 managed-node1 podman[35708]: 2025-07-07 20:12:50.836115952 -0400 EDT m=+0.123403410 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)
Jul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35741]: This container is intended for podman CI testing
Jul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:50 managed-node1 podman[35748]: 2025-07-07 20:12:50.869261632 -0400 EDT m=+0.023764777 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:50 managed-node1 podman[35748]: 2025-07-07 20:12:50.881551249 -0400 EDT m=+0.036054336 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 4165.
Jul 07 20:12:50 managed-node1 podman[35748]: 2025-07-07 20:12:50.928402597 -0400 EDT m=+0.082905709 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:50 managed-node1 podman[35748]: 2025-07-07 20:12:50.931314876 -0400 EDT m=+0.085818011 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)
Jul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35759]: This container is intended for podman CI testing
Jul 07 20:12:50 managed-node1 conmon[35759]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events
Jul 07 20:12:50 managed-node1 podman[35763]: 2025-07-07 20:12:50.96332082 -0400 EDT m=+0.021467626 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)
Jul 07 20:12:50 managed-node1 podman[35763]: 2025-07-07 20:12:50.976815265 -0400 EDT m=+0.034961991 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)
Jul 07 20:12:51 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 4170.
Jul 07 20:12:51 managed-node1 podman[35763]: 2025-07-07 20:12:51.044375911 -0400 EDT m=+0.102522824 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)
Jul 07 20:12:51 managed-node1 auth_test_1_kube-auth_test_1_kube[35798]: This container is intended for podman CI testing
Jul 07 20:12:51 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:51 managed-node1 podman[35763]: 2025-07-07 20:12:51.049348235 -0400 EDT m=+0.107495274 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:51 managed-node1 podman[35825]: 2025-07-07 20:12:51.092437769 -0400 EDT m=+0.029222529 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)
Jul 07 20:12:51 managed-node1 podman[35825]: 2025-07-07 20:12:51.107275694 -0400 EDT m=+0.044060322 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)
Jul 07 20:12:51 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 4175.
Jul 07 20:12:51 managed-node1 podman[35825]: 2025-07-07 20:12:51.15929211 -0400 EDT m=+0.096076800 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)
Jul 07 20:12:51 managed-node1 podman[35825]: 2025-07-07 20:12:51.163527993 -0400 EDT m=+0.100312554 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)
Jul 07 20:12:51 managed-node1 auth_test_1_kube-auth_test_1_kube[35867]: This container is intended for podman CI testing
Jul 07 20:12:51 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:51 managed-node1 conmon[35867]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events
Jul 07 20:12:51 managed-node1 podman[35894]: 2025-07-07 20:12:51.225315735 -0400 EDT m=+0.048836182 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)
Jul 07 20:12:51 managed-node1 podman[35894]: 2025-07-07 20:12:51.239386226 -0400 EDT m=+0.062906669 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)
Jul 07 20:12:51 managed-node1 python3.9[35925]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None
Jul 07 20:12:51 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 4180.
Jul 07 20:12:51 managed-node1 podman[35894]: 2025-07-07 20:12:51.466820809 -0400 EDT m=+0.290341396 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:51 managed-node1 podman[35894]: 2025-07-07 20:12:51.470785368 -0400 EDT m=+0.294305786 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:51 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:51 managed-node1 auth_test_1_kube-auth_test_1_kube[35931]: This container is intended for podman CI testing
Jul 07 20:12:51 managed-node1 conmon[35931]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events
Jul 07 20:12:51 managed-node1 systemd[1]: Reloading.
Jul 07 20:12:51 managed-node1 podman[35937]: 2025-07-07 20:12:51.552775836 -0400 EDT m=+0.056358134 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)
Jul 07 20:12:51 managed-node1 podman[35937]: 2025-07-07 20:12:51.570347566 -0400 EDT m=+0.073929520 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)
Jul 07 20:12:51 managed-node1 systemd-rc-local-generator[35966]: /etc/rc.d/rc.local is not marked executable, skipping.
Jul 07 20:12:51 managed-node1 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play...
░░ Subject: A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has begun execution.
░░
░░ The job identifier is 4185.
Jul 07 20:12:51 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 4186.
Jul 07 20:12:51 managed-node1 podman[35937]: 2025-07-07 20:12:51.780283168 -0400 EDT m=+0.283865102 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)
Jul 07 20:12:51 managed-node1 auth_test_1_kube-auth_test_1_kube[35985]: This container is intended for podman CI testing
Jul 07 20:12:51 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:51 managed-node1 podman[35937]: 2025-07-07 20:12:51.794156305 -0400 EDT m=+0.297738289 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:51 managed-node1 podman[35983]: 2025-07-07 20:12:51.805487564 -0400 EDT m=+0.042136278 pod stop 2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7 (image=, name=auth_test_1_kube)
Jul 07 20:12:51 managed-node1 podman[35983]: 2025-07-07 20:12:51.807535435 -0400 EDT m=+0.044184379 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:51 managed-node1 systemd[1]: var-lib-containers-storage-overlay-6e8d6012bb184b7d9b4ca98710c13c4398e87e9e6ee6aa4809430868bbb52621-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay-6e8d6012bb184b7d9b4ca98710c13c4398e87e9e6ee6aa4809430868bbb52621-merged.mount has successfully entered the 'dead' state.
Jul 07 20:12:51 managed-node1 podman[35983]: 2025-07-07 20:12:51.856254498 -0400 EDT m=+0.092903180 container cleanup b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)
Jul 07 20:12:51 managed-node1 systemd[1]: libpod-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631.scope has successfully entered the 'dead' state.
Jul 07 20:12:51 managed-node1 podman[35983]: 2025-07-07 20:12:51.866791467 -0400 EDT m=+0.103440452 container died feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631 (image=, name=2f0bc5d17106-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:51 managed-node1 systemd[1]: run-r8208ec8fcd4d4202907a4db922529a4b.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-r8208ec8fcd4d4202907a4db922529a4b.scope has successfully entered the 'dead' state.
Jul 07 20:12:51 managed-node1 kernel: podman1: port 1(veth1) entered disabled state
Jul 07 20:12:51 managed-node1 kernel: veth1 (unregistering): left allmulticast mode
Jul 07 20:12:51 managed-node1 kernel: veth1 (unregistering): left promiscuous mode
Jul 07 20:12:51 managed-node1 kernel: podman1: port 1(veth1) entered disabled state
Jul 07 20:12:51 managed-node1 NetworkManager[642]: [1751933571.9144] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Jul 07 20:12:51 managed-node1 systemd[1]: Starting Network Manager Script Dispatcher Service...
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has begun execution.
░░
░░ The job identifier is 4192.
Jul 07 20:12:51 managed-node1 systemd[1]: Started Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░
░░ The job identifier is 4192.
Jul 07 20:12:52 managed-node1 systemd[1]: run-netns-netns\x2d2411dc39\x2d3430\x2d1e50\x2dd025\x2d1a201717aa6d.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-netns-netns\x2d2411dc39\x2d3430\x2d1e50\x2dd025\x2d1a201717aa6d.mount has successfully entered the 'dead' state.
Jul 07 20:12:52 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631-rootfs-merge.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay\x2dcontainers-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631-rootfs-merge.mount has successfully entered the 'dead' state.
Jul 07 20:12:52 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631-userdata-shm.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay\x2dcontainers-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631-userdata-shm.mount has successfully entered the 'dead' state.
Jul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.071289258 -0400 EDT m=+0.307937965 container cleanup feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631 (image=, name=2f0bc5d17106-infra, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:52 managed-node1 systemd[1]: Removed slice cgroup machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice.
░░ Subject: A stop job for unit machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice has finished.
░░
░░ The job identifier is 4258 and the job result is done.
Jul 07 20:12:52 managed-node1 systemd[1]: machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice: Consumed 1.746s CPU time.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice completed and consumed the indicated resources.
Jul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.098281515 -0400 EDT m=+0.334930231 container remove b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.124590961 -0400 EDT m=+0.361239677 container remove feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631 (image=, name=2f0bc5d17106-infra, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:52 managed-node1 systemd[1]: machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice: Failed to open /run/systemd/transient/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice: No such file or directory
Jul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.134201698 -0400 EDT m=+0.370850385 pod remove 2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7 (image=, name=auth_test_1_kube)
Jul 07 20:12:52 managed-node1 podman[35983]: Pods stopped:
Jul 07 20:12:52 managed-node1 podman[35983]: 2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7
Jul 07 20:12:52 managed-node1 podman[35983]: Pods removed:
Jul 07 20:12:52 managed-node1 podman[35983]: 2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7
Jul 07 20:12:52 managed-node1 podman[35983]: Secrets removed:
Jul 07 20:12:52 managed-node1 podman[35983]: Volumes removed:
Jul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.138635694 -0400 EDT m=+0.375284640 container kill 782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:52 managed-node1 systemd[1]: libpod-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc.scope has successfully entered the 'dead' state.
Jul 07 20:12:52 managed-node1 conmon[20791]: conmon 782aa8ef72d65d91e61b : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/libpod-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc.scope/container/memory.events
Jul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.145356852 -0400 EDT m=+0.382005666 container died 782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:52 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc-rootfs-merge.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay\x2dcontainers-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc-rootfs-merge.mount has successfully entered the 'dead' state.
Jul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.214468795 -0400 EDT m=+0.451117504 container remove 782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:52 managed-node1 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has successfully entered the 'dead' state.
Jul 07 20:12:52 managed-node1 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play.
░░ Subject: A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has finished.
░░
░░ The job identifier is 4185 and the job result is done.
Jul 07 20:12:52 managed-node1 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service: Consumed 36.014s CPU time.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service completed and consumed the indicated resources.
Jul 07 20:12:52 managed-node1 python3.9[36210]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:12:52 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc-userdata-shm.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay\x2dcontainers-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc-userdata-shm.mount has successfully entered the 'dead' state.
Jul 07 20:12:53 managed-node1 python3.9[36361]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Jul 07 20:12:53 managed-node1 python3.9[36361]: ansible-containers.podman.podman_play version: 5.5.1, kube file /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml
Jul 07 20:12:53 managed-node1 python3.9[36523]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:12:54 managed-node1 python3.9[36672]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:12:56 managed-node1 python3.9[36823]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Jul 07 20:12:56 managed-node1 systemd[1]: Reloading.
Jul 07 20:12:56 managed-node1 systemd-rc-local-generator[36842]: /etc/rc.d/rc.local is not marked executable, skipping.
Jul 07 20:12:57 managed-node1 python3.9[37008]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:12:57 managed-node1 python3.9[37308]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:12:59 managed-node1 python3.9[37607]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:00 managed-node1 python3.9[37762]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:01 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Jul 07 20:13:03 managed-node1 python3.9[37913]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:05 managed-node1 python3.9[38064]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:06 managed-node1 python3.9[38215]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:07 managed-node1 python3.9[38366]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:08 managed-node1 python3.9[38516]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None
Jul 07 20:13:08 managed-node1 python3.9[38667]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:09 managed-node1 python3.9[38816]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:10 managed-node1 python3.9[38965]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:12 managed-node1 python3.9[39116]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Jul 07 20:13:12 managed-node1 systemd[1]: Reloading.
Jul 07 20:13:12 managed-node1 systemd-rc-local-generator[39137]: /etc/rc.d/rc.local is not marked executable, skipping.
Jul 07 20:13:12 managed-node1 python3.9[39301]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:13 managed-node1 python3.9[39601]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:15 managed-node1 python3.9[39900]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:16 managed-node1 python3.9[40054]: ansible-getent Invoked with database=passwd key=auth_test_user1 fail_key=False service=None split=None
Jul 07 20:13:17 managed-node1 python3.9[40204]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:17 managed-node1 python3.9[40354]: ansible-user Invoked with name=auth_test_user1 state=absent non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None
Jul 07 20:13:18 managed-node1 python3.9[40504]: ansible-file Invoked with path=/home/auth_test_user1 state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:18 managed-node1 python3.9[40653]: ansible-ansible.legacy.command Invoked with _raw_params=podman inspect podman_registry --format '{{range .}}{{range .Mounts}}{{if eq .Type "volume"}}{{.Name}}{{end}}{{end}}{{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:19 managed-node1 python3.9[40810]: ansible-ansible.legacy.command Invoked with _raw_params=podman rm -f podman_registry _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:19 managed-node1 systemd[1]: libpod-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b.scope has successfully entered the 'dead' state.
Jul 07 20:13:19 managed-node1 podman[40811]: 2025-07-07 20:13:19.265873387 -0400 EDT m=+0.045935077 container died 2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b (image=quay.io/libpod/registry:2.8.2, name=podman_registry)
Jul 07 20:13:19 managed-node1 kernel: podman0: port 1(veth0) entered disabled state
Jul 07 20:13:19 managed-node1 kernel: veth0 (unregistering): left allmulticast mode
Jul 07 20:13:19 managed-node1 kernel: veth0 (unregistering): left promiscuous mode
Jul 07 20:13:19 managed-node1 kernel: podman0: port 1(veth0) entered disabled state
Jul 07 20:13:19 managed-node1 NetworkManager[642]: [1751933599.3183] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Jul 07 20:13:19 managed-node1 systemd[1]: Starting Network Manager Script Dispatcher Service...
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has begun execution.
░░
░░ The job identifier is 4260.
Jul 07 20:13:19 managed-node1 systemd[1]: Started Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░
░░ The job identifier is 4260.
Jul 07 20:13:19 managed-node1 systemd[1]: run-netns-netns\x2da357660f\x2d51e9\x2def21\x2deb3c\x2d7f281ab8e18b.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-netns-netns\x2da357660f\x2d51e9\x2def21\x2deb3c\x2d7f281ab8e18b.mount has successfully entered the 'dead' state.
Jul 07 20:13:19 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b-userdata-shm.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay\x2dcontainers-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b-userdata-shm.mount has successfully entered the 'dead' state.
Jul 07 20:13:19 managed-node1 systemd[1]: var-lib-containers-storage-overlay-112b09f11ff8fbba8e451f27769fa8e700d9bb89038833f136b238148dff37fa-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay-112b09f11ff8fbba8e451f27769fa8e700d9bb89038833f136b238148dff37fa-merged.mount has successfully entered the 'dead' state.
Jul 07 20:13:19 managed-node1 podman[40811]: 2025-07-07 20:13:19.49814603 -0400 EDT m=+0.278207625 container remove 2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b (image=quay.io/libpod/registry:2.8.2, name=podman_registry)
Jul 07 20:13:19 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:13:19 managed-node1 systemd[1]: libpod-conmon-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-conmon-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b.scope has successfully entered the 'dead' state.
Jul 07 20:13:19 managed-node1 python3.9[41023]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume rm 1e074241011384a8157c17bad24c616d5279de9c3f24494baf6b8341065b25b1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:19 managed-node1 podman[41024]: 2025-07-07 20:13:19.910047174 -0400 EDT m=+0.025683882 volume remove 1e074241011384a8157c17bad24c616d5279de9c3f24494baf6b8341065b25b1
Jul 07 20:13:20 managed-node1 python3.9[41180]: ansible-file Invoked with path=/tmp/lsr_g7zmh6pf_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:22 managed-node1 python3.9[41378]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Jul 07 20:13:23 managed-node1 python3.9[41553]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:23 managed-node1 python3.9[41702]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:25 managed-node1 python3.9[42000]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:26 managed-node1 python3.9[42155]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 07 20:13:26 managed-node1 python3.9[42305]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:28 managed-node1 python3.9[42456]: ansible-tempfile Invoked with state=directory prefix=lsr_podman_config_ suffix= path=None
Jul 07 20:13:28 managed-node1 python3.9[42605]: ansible-ansible.legacy.command Invoked with _raw_params=tar --ignore-failed-read -c -P -v -p -f /tmp/lsr_podman_config_630o0ml1/backup.tar /etc/containers/containers.conf.d/50-systemroles.conf /etc/containers/registries.conf.d/50-systemroles.conf /etc/containers/storage.conf /etc/containers/policy.json _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
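Before any host-wide container configuration is changed, the run creates a temporary directory and archives the files it is about to touch so they can be restored afterwards. A hedged sketch of that backup step, using the paths shown in the log (the registered variable name is illustrative):

- name: Create a scratch directory for the backup
  ansible.builtin.tempfile:
    state: directory
    prefix: lsr_podman_config_
  register: __backup_dir   # illustrative name

- name: Archive the container config files before modifying them
  ansible.builtin.command:
    cmd: >-
      tar --ignore-failed-read -c -P -v -p
      -f {{ __backup_dir.path }}/backup.tar
      /etc/containers/containers.conf.d/50-systemroles.conf
      /etc/containers/registries.conf.d/50-systemroles.conf
      /etc/containers/storage.conf
      /etc/containers/policy.json
  changed_when: true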
Jul 07 20:13:29 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Jul 07 20:13:29 managed-node1 python3.9[42755]: ansible-user Invoked with name=user1 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None
Jul 07 20:13:29 managed-node1 useradd[42757]: new group: name=user1, GID=1000
Jul 07 20:13:29 managed-node1 useradd[42757]: new user: name=user1, UID=1000, GID=1000, home=/home/user1, shell=/bin/bash, from=/dev/pts/0
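A rootless test account (user1) is created next; useradd assigns UID/GID 1000 and a home directory under /home/user1. A roughly equivalent task, assuming the defaults visible in the log:

- name: Create the rootless test user (sketch)
  ansible.builtin.user:
    name: user1
    state: present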
Jul 07 20:13:31 managed-node1 python3.9[43061]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:32 managed-node1 python3.9[43217]: ansible-getent Invoked with database=passwd key=user1 fail_key=False service=None split=None
Jul 07 20:13:33 managed-node1 python3.9[43367]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:33 managed-node1 python3.9[43518]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:33 managed-node1 python3.9[43668]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
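For each candidate user the run then checks whether the shadow-utils getsubids helper exists and, if so, queries the subordinate UID and GID ranges that rootless podman needs. A hedged sketch of that probe (result names are illustrative):

- name: Check for the getsubids helper
  ansible.builtin.stat:
    path: /usr/bin/getsubids
  register: __getsubids   # illustrative name

- name: Query subordinate UID ranges for the user
  ansible.builtin.command: getsubids user1
  when: __getsubids.stat.exists
  changed_when: false

- name: Query subordinate GID ranges for the user
  ansible.builtin.command: getsubids -g user1
  when: __getsubids.stat.exists
  changed_when: false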
Jul 07 20:13:34 managed-node1 python3.9[43818]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:35 managed-node1 python3.9[43967]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:13:35 managed-node1 python3.9[44087]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933614.9410055-13310-24639621612646/.source.conf _original_basename=.lefyhyea follow=False checksum=b1776092f2908d76e11fd6af87267469b2c17d5a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:36 managed-node1 python3.9[44236]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:36 managed-node1 python3.9[44385]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:13:36 managed-node1 python3.9[44505]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933616.3031077-13365-95635820072900/.source.conf _original_basename=.79pds_qw follow=False checksum=fde25488ce7040f1639af7bfc88ed125318cc0b0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:37 managed-node1 python3.9[44654]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:37 managed-node1 python3.9[44803]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:13:38 managed-node1 python3.9[44923]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/storage.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933617.4859679-13409-167338433742499/.source.conf _original_basename=.5__9m3z1 follow=False checksum=38f015f4780579bd388dd955b42916199fd7fe19 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:38 managed-node1 python3.9[45072]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:38 managed-node1 python3.9[45221]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:39 managed-node1 python3.9[45370]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:13:39 managed-node1 python3.9[45490]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/policy.json owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933619.023253-13468-75507327963883/.source.json _original_basename=.gsbubo1d follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
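The block above deploys per-user configuration for user1 under ~/.config/containers: 50-systemroles.conf drop-ins in containers.conf.d and registries.conf.d, a storage.conf, and a policy.json, each preceded by a directory task and written with a copy. A condensed sketch of that pattern; the rendered file contents are not visible in the journal, so the content below is only a placeholder:

- name: Ensure the per-user drop-in directory exists
  ansible.builtin.file:
    path: /home/user1/.config/containers/containers.conf.d
    state: directory
    owner: user1
    group: user1
    mode: "0755"

- name: Install the containers.conf drop-in (placeholder content)
  ansible.builtin.copy:
    dest: /home/user1/.config/containers/containers.conf.d/50-systemroles.conf
    content: |
      # rendered by the role; actual settings not visible in this log
    owner: user1
    group: user1
    mode: "0644"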
Jul 07 20:13:40 managed-node1 python3.9[45639]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:40 managed-node1 python3.9[45790]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:41 managed-node1 python3.9[45940]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:41 managed-node1 python3.9[46090]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:43 managed-node1 python3.9[46508]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:43 managed-node1 python3.9[46659]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:44 managed-node1 python3.9[46809]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:45 managed-node1 python3.9[46959]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:45 managed-node1 python3.9[47110]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:45 managed-node1 python3.9[47261]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:46 managed-node1 python3.9[47412]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:46 managed-node1 python3.9[47563]: ansible-ansible.legacy.command Invoked with _raw_params=grep 'container_name_as_hostname[ ]*=[ ]*true' /home/user1/.config/containers/containers.conf.d/50-systemroles.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:48 managed-node1 python3.9[47862]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:49 managed-node1 python3.9[48017]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:49 managed-node1 python3.9[48168]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:49 managed-node1 python3.9[48318]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:50 managed-node1 python3.9[48468]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:51 managed-node1 python3.9[48617]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:13:51 managed-node1 python3.9[48692]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf _original_basename=.yuv1oz8a recurse=False state=file path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:52 managed-node1 python3.9[48841]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:52 managed-node1 python3.9[48990]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:13:52 managed-node1 python3.9[49065]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf _original_basename=.t7udz4o_ recurse=False state=file path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:53 managed-node1 python3.9[49214]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:53 managed-node1 python3.9[49363]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:13:53 managed-node1 python3.9[49438]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/storage.conf _original_basename=.acx818hv recurse=False state=file path=/home/user1/.config/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:54 managed-node1 python3.9[49587]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:54 managed-node1 python3.9[49736]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:55 managed-node1 python3.9[49887]: ansible-slurp Invoked with path=/home/user1/.config/containers/policy.json src=/home/user1/.config/containers/policy.json
Jul 07 20:13:56 managed-node1 python3.9[50036]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:56 managed-node1 python3.9[50187]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:56 managed-node1 python3.9[50337]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:57 managed-node1 python3.9[50487]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:59 managed-node1 python3.9[50860]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:59 managed-node1 python3.9[51011]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:14:00 managed-node1 python3.9[51161]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:14:01 managed-node1 python3.9[51311]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:01 managed-node1 python3.9[51462]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:01 managed-node1 python3.9[51613]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:02 managed-node1 python3.9[51764]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:03 managed-node1 python3.9[52064]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:14:04 managed-node1 python3.9[52219]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 07 20:14:05 managed-node1 python3.9[52369]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:05 managed-node1 python3.9[52520]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:06 managed-node1 python3.9[52669]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:06 managed-node1 python3.9[52789]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933646.0899522-14266-164202167319133/.source.conf _original_basename=.f55n9smu follow=False checksum=b1776092f2908d76e11fd6af87267469b2c17d5a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:07 managed-node1 python3.9[52938]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:07 managed-node1 python3.9[53087]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:07 managed-node1 python3.9[53207]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/registries.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933647.2349114-14295-264742941497318/.source.conf _original_basename=.kaocflcp follow=False checksum=fde25488ce7040f1639af7bfc88ed125318cc0b0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:08 managed-node1 python3.9[53356]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:08 managed-node1 python3.9[53505]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:08 managed-node1 python3.9[53627]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/storage.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933648.3819814-14329-124138118490068/.source.conf _original_basename=.cw2ofq3l follow=False checksum=38f015f4780579bd388dd955b42916199fd7fe19 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:09 managed-node1 python3.9[53776]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:09 managed-node1 python3.9[53925]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:10 managed-node1 python3.9[54076]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json
Jul 07 20:14:10 managed-node1 python3.9[54225]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:10 managed-node1 python3.9[54347]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/policy.json owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933650.3431063-14385-203115853451002/.source.json _original_basename=.9ge15xwj follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
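Note the order for /etc/containers/policy.json: the existing file is stat'd and slurped before the new one is copied in, presumably so its current content is on record before being replaced. A hedged sketch of that read-then-replace pattern (variable names and placeholder content are illustrative):

- name: Read the current policy.json before replacing it
  ansible.builtin.slurp:
    src: /etc/containers/policy.json
  register: __old_policy   # illustrative name

- name: Install the role-managed policy.json (placeholder content)
  ansible.builtin.copy:
    dest: /etc/containers/policy.json
    content: "{{ __policy_json | default('{}') }}"   # placeholder; real content not in the log
    owner: root
    mode: "0644"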
Jul 07 20:14:11 managed-node1 python3.9[54496]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:12 managed-node1 python3.9[54647]: ansible-file Invoked with path=/root/.config/containers state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:14 managed-node1 python3.9[55067]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:15 managed-node1 python3.9[55218]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:15 managed-node1 python3.9[55369]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:16 managed-node1 python3.9[55520]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:16 managed-node1 python3.9[55671]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:18 managed-node1 python3.9[55971]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:14:19 managed-node1 python3.9[56126]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:20 managed-node1 python3.9[56277]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:21 managed-node1 python3.9[56426]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:21 managed-node1 python3.9[56501]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/containers.conf.d/50-systemroles.conf _original_basename=.05q0dgxv recurse=False state=file path=/etc/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:22 managed-node1 python3.9[56650]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:22 managed-node1 python3.9[56799]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:22 managed-node1 python3.9[56874]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=.p8krhe9y recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:23 managed-node1 python3.9[57023]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:23 managed-node1 python3.9[57172]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:24 managed-node1 python3.9[57247]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=.41qn4gp5 recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:24 managed-node1 python3.9[57396]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:24 managed-node1 python3.9[57545]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:25 managed-node1 python3.9[57696]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json
Jul 07 20:14:26 managed-node1 python3.9[57845]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:27 managed-node1 python3.9[57996]: ansible-file Invoked with path=/root/.config/containers state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:28 managed-node1 python3.9[58369]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:29 managed-node1 python3.9[58520]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:30 managed-node1 python3.9[58671]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:30 managed-node1 python3.9[58822]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:30 managed-node1 python3.9[58973]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:31 managed-node1 python3.9[59124]: ansible-slurp Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf src=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf
Jul 07 20:14:32 managed-node1 python3.9[59273]: ansible-slurp Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf src=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf
Jul 07 20:14:32 managed-node1 python3.9[59422]: ansible-slurp Invoked with path=/home/user1/.config/containers/storage.conf src=/home/user1/.config/containers/storage.conf
Jul 07 20:14:33 managed-node1 python3.9[59571]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf
Jul 07 20:14:33 managed-node1 python3.9[59720]: ansible-slurp Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf src=/etc/containers/registries.conf.d/50-systemroles.conf
Jul 07 20:14:34 managed-node1 python3.9[59869]: ansible-slurp Invoked with path=/etc/containers/storage.conf src=/etc/containers/storage.conf
Jul 07 20:14:35 managed-node1 python3.9[60167]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:14:37 managed-node1 python3.9[60322]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:38 managed-node1 python3.9[60473]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:38 managed-node1 python3.9[60622]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:38 managed-node1 python3.9[60744]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933678.2505736-15338-238322287102956/.source.conf _original_basename=.8f6zzn7v follow=False checksum=9694c1d1c700a6435eecf4066b052584f4ee94c0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:39 managed-node1 python3.9[60893]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:39 managed-node1 python3.9[61042]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:40 managed-node1 python3.9[61117]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=.ttqitrgk recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:40 managed-node1 python3.9[61266]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:41 managed-node1 python3.9[61415]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:41 managed-node1 python3.9[61490]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=.chl3shxe recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:41 managed-node1 python3.9[61639]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:42 managed-node1 python3.9[61788]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:42 managed-node1 python3.9[61939]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json
Jul 07 20:14:43 managed-node1 python3.9[62088]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf
Jul 07 20:14:43 managed-node1 python3.9[62237]: ansible-file Invoked with state=absent path=/etc/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:44 managed-node1 python3.9[62386]: ansible-file Invoked with state=absent path=/etc/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:44 managed-node1 python3.9[62535]: ansible-file Invoked with state=absent path=/etc/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:44 managed-node1 python3.9[62684]: ansible-file Invoked with state=absent path=/etc/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:45 managed-node1 python3.9[62833]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:45 managed-node1 python3.9[62982]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:45 managed-node1 python3.9[63131]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:46 managed-node1 python3.9[63280]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:46 managed-node1 python3.9[63429]: ansible-file Invoked with state=absent path=/root/.config/containers/auth.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:46 managed-node1 python3.9[63578]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/auth.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:47 managed-node1 python3.9[63727]: ansible-ansible.legacy.command Invoked with _raw_params=tar xfvpP /tmp/lsr_podman_config_630o0ml1/backup.tar _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:14:47 managed-node1 python3.9[63877]: ansible-file Invoked with state=absent path=/tmp/lsr_podman_config_630o0ml1 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
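At the end of the config test the archived files are restored and the scratch directory is removed, mirroring the backup step earlier. A sketch using the path from the log:

- name: Restore the original container config files
  ansible.builtin.command:
    cmd: tar xfvpP /tmp/lsr_podman_config_630o0ml1/backup.tar
  changed_when: true

- name: Remove the backup scratch directory
  ansible.builtin.file:
    path: /tmp/lsr_podman_config_630o0ml1
    state: absent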
Jul 07 20:14:49 managed-node1 python3.9[64075]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Jul 07 20:14:50 managed-node1 python3.9[64226]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:50 managed-node1 python3.9[64375]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:52 managed-node1 python3.9[64673]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:14:53 managed-node1 python3.9[64828]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 07 20:14:53 managed-node1 python3.9[64978]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:57 managed-node1 python3.9[65178]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Jul 07 20:15:00 managed-node1 python3.9[65353]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:00 managed-node1 python3.9[65502]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:02 managed-node1 python3.9[65800]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:03 managed-node1 python3.9[65955]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 07 20:15:04 managed-node1 python3.9[66105]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:09 managed-node1 python3.9[66305]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Jul 07 20:15:10 managed-node1 python3.9[66480]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:10 managed-node1 python3.9[66629]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:12 managed-node1 python3.9[66927]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:13 managed-node1 python3.9[67082]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 07 20:15:14 managed-node1 python3.9[67232]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:16 managed-node1 python3.9[67383]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:17 managed-node1 python3.9[67534]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:18 managed-node1 python3.9[67683]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:15:18 managed-node1 python3.9[67803]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933717.8906143-17219-30430442070937/.source.container dest=/etc/containers/systemd/nopull.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=670d64fc68a9768edb20cad26df2acc703542d85 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
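Here the quadlet tests begin: a .container unit is written into /etc/containers/systemd, the directory podman's quadlet generator scans so that each file becomes a <name>.service unit on the next daemon reload. The journal does not show the file's contents, so the body below is only a hypothetical minimal unit to illustrate the format:

- name: Install a quadlet container unit (hypothetical content)
  ansible.builtin.copy:
    dest: /etc/containers/systemd/nopull.container
    owner: root
    group: "0"
    mode: "0644"
    # The real unit body is not visible in the journal; this is a minimal example.
    content: |
      [Container]
      Image=quay.io/libpod/testimage:latest

      [Install]
      WantedBy=default.target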
Jul 07 20:15:20 managed-node1 python3.9[68101]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:21 managed-node1 python3.9[68256]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:23 managed-node1 python3.9[68407]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:15:24 managed-node1 podman[68566]: 2025-07-07 20:15:24.692519338 -0400 EDT m=+0.023060903 image pull-error this_is_a_bogus_image:latest short-name resolution enforced but cannot prompt without a TTY
Jul 07 20:15:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
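The pull-error above is expected for this test case: this_is_a_bogus_image is an unqualified (short) image name, and with short-name resolution enforced podman would need an interactive prompt to pick a registry, which is impossible in a non-TTY Ansible run. Fully qualifying the image avoids the prompt entirely; a hedged illustration (the image name is just an example, not taken from this log):

- name: Pull an image by its fully qualified name (no short-name prompt needed)
  ansible.builtin.command:
    cmd: podman pull quay.io/libpod/testimage:latest
  changed_when: true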
Jul 07 20:15:25 managed-node1 python3.9[68721]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:25 managed-node1 python3.9[68870]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/bogus.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:15:25 managed-node1 python3.9[68990]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933725.2913904-17409-127642446506455/.source.container dest=/etc/containers/systemd/bogus.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1d087e679d135214e8ac9ccaf33b2222916efb7f backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:28 managed-node1 python3.9[69288]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:29 managed-node1 python3.9[69443]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:32 managed-node1 python3.9[69594]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:33 managed-node1 python3.9[69745]: ansible-systemd Invoked with name=nopull.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Jul 07 20:15:33 managed-node1 python3.9[69895]: ansible-stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
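Cleanup of the failing unit follows the usual quadlet pattern: stop and disable the generated service, then check for the .container file before removing it. A sketch built from the parameters in the two entries above (the registered name is illustrative):

- name: Stop and disable the quadlet-generated service (parameters from the log)
  ansible.builtin.systemd:
    name: nopull.service
    scope: system
    state: stopped
    enabled: false
    force: true

- name: Check whether the quadlet file is still present
  ansible.builtin.stat:
    path: /etc/containers/systemd/nopull.container
  register: __nopull_file   # illustrative name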
Jul 07 20:15:34 managed-node1 python3.9[70195]: ansible-ansible.legacy.command Invoked with _raw_params=set -x
set -o pipefail
exec 1>&2
#podman volume rm --all
#podman network prune -f
podman volume ls
podman network ls
podman secret ls
podman container ls
podman pod ls
podman images
systemctl list-units | grep quadlet
_uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
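The shell snippet logged above is a debug helper: it redirects stdout to stderr so the listing appears in the failure output, then enumerates any leftover podman volumes, networks, secrets, containers, pods, images, and quadlet-generated units. Wrapped as a task it would look roughly like this:

- name: List leftover podman resources and quadlet units (debug helper)
  ansible.builtin.shell:
    cmd: |
      set -x
      set -o pipefail
      exec 1>&2
      podman volume ls
      podman network ls
      podman secret ls
      podman container ls
      podman pod ls
      podman images
      # "|| true" added in this sketch so an empty grep does not fail the task
      systemctl list-units | grep quadlet || true
  changed_when: false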
Jul 07 20:15:34 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:15:34 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:15:35 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:15:36 managed-node1 python3.9[70540]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:37 managed-node1 python3.9[70695]: ansible-getent Invoked with database=passwd key=user_quadlet_basic fail_key=False service=None split=None
Jul 07 20:15:37 managed-node1 python3.9[70845]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:39 managed-node1 python3.9[71044]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Jul 07 20:15:40 managed-node1 python3.9[71219]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:40 managed-node1 python3.9[71368]: ansible-ansible.legacy.command Invoked with _raw_params=systemctl is-system-running _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:41 managed-node1 python3.9[71518]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Jul 07 20:15:42 managed-node1 python3.9[71668]: ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Jul 07 20:15:44 managed-node1 python3.9[71818]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:44 managed-node1 python3.9[71967]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:45 managed-node1 python3.9[72116]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Jul 07 20:15:46 managed-node1 python3.9[72267]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=#
# Ansible managed
#
# system_role:certificate
booted=True provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None
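The certificate role drives certmonger for this request; a roughly equivalent hand-run request with getcert would look like the sketch below (the self-sign CA selection is handled by the role and omitted here, so treat this as illustrative only):

getcert request -w \
  -f /etc/pki/tls/certs/quadlet_demo.crt \
  -k /etc/pki/tls/private/quadlet_demo.key \
  -D localhost        # DNS SAN, matching dns=['localhost'] above
getcert list          # the request shows up as a tracked certificate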
Jul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546
Jul 07 20:15:46 managed-node1 certmonger[72282]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved.
Jul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546
Jul 07 20:15:46 managed-node1 python3.9[72431]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt
Jul 07 20:15:47 managed-node1 python3.9[72580]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key
Jul 07 20:15:47 managed-node1 python3.9[72729]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt
Jul 07 20:15:48 managed-node1 python3.9[72878]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:48 managed-node1 certmonger[12020]: 2025-07-07 20:15:48 [12020] Wrote to /var/lib/certmonger/requests/20250708001546
Jul 07 20:15:48 managed-node1 python3.9[73028]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:48 managed-node1 python3.9[73177]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:49 managed-node1 python3.9[73326]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:49 managed-node1 python3.9[73475]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:50 managed-node1 python3.9[73624]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:51 managed-node1 python3.9[73922]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:52 managed-node1 python3.9[74077]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 07 20:15:52 managed-node1 python3.9[74227]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:54 managed-node1 python3.9[74378]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:54 managed-node1 python3.9[74527]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:55 managed-node1 python3.9[74676]: ansible-ansible.legacy.command Invoked with _raw_params=systemctl is-system-running _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:55 managed-node1 python3.9[74826]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Jul 07 20:15:57 managed-node1 python3.9[74976]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Jul 07 20:15:57 managed-node1 python3.9[75127]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Jul 07 20:15:57 managed-node1 systemd[1]: Reloading.
Jul 07 20:15:57 managed-node1 systemd-rc-local-generator[75147]: /etc/rc.d/rc.local is not marked executable, skipping.
Jul 07 20:15:58 managed-node1 systemd[1]: Starting firewalld - dynamic firewall daemon...
░░ Subject: A start job for unit firewalld.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit firewalld.service has begun execution.
░░
░░ The job identifier is 4326.
Jul 07 20:15:58 managed-node1 systemd[1]: Started firewalld - dynamic firewall daemon.
░░ Subject: A start job for unit firewalld.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit firewalld.service has finished successfully.
░░
░░ The job identifier is 4326.
Jul 07 20:15:58 managed-node1 quadlet-generator[75135]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:15:58 managed-node1 kernel: Warning: Unmaintained driver is detected: ip_set
Jul 07 20:15:58 managed-node1 kernel: Warning: Unmaintained driver is detected: ip_set_init
Jul 07 20:15:59 managed-node1 python3.9[75351]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Jul 07 20:15:59 managed-node1 python3.9[75500]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
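The two firewall_lib calls above open 8000/tcp and 9000/tcp in both the runtime and the permanent firewalld configuration; the hand-run equivalent would be:

firewall-cmd --add-port=8000/tcp --add-port=9000/tcp               # runtime
firewall-cmd --permanent --add-port=8000/tcp --add-port=9000/tcp   # persistent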
Jul 07 20:15:59 managed-node1 rsyslogd[809]: imjournal: journal files changed, reloading... [v8.2412.0-2.el9 try https://www.rsyslog.com/e/0 ]
Jul 07 20:16:02 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:02 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:02 managed-node1 podman[75670]: 2025-07-07 20:16:02.087398298 -0400 EDT m=+0.019911969 secret create ea8c34f75eeeee13a33c9ef0e
Jul 07 20:16:03 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:03 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:03 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:03 managed-node1 podman[75848]: 2025-07-07 20:16:03.507704545 -0400 EDT m=+0.017383206 secret create cb2d4c65044c524adeead96e5
Jul 07 20:16:04 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:05 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:05 managed-node1 podman[76024]: 2025-07-07 20:16:05.052661564 -0400 EDT m=+0.019543154 secret create a95acde29e8944984a9c6b05c
Jul 07 20:16:05 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
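The three secret create events above record only secret IDs, not names. Creating one by hand follows this pattern (the secret name and the environment variable holding the value are hypothetical):

# '-' reads the secret value from stdin instead of a file on disk
printf '%s' "$MYSQL_ROOT_PASSWORD" | podman secret create mysql-root-password -
podman secret ls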
Jul 07 20:16:06 managed-node1 python3.9[76180]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:07 managed-node1 python3.9[76331]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:08 managed-node1 python3.9[76480]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:16:08 managed-node1 python3.9[76600]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933767.7445745-18733-5242759390053/.source.network dest=/etc/containers/systemd/quadlet-demo.network owner=root group=0 mode=0644 _original_basename=quadlet-demo.network follow=False checksum=e57c08d49aff4bae8daab138d913aeddaa8682a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
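quadlet-demo.network is a quadlet unit: from <name>.network the generator produces <name>-network.service, and unless NetworkName= overrides it the podman network it creates is called systemd-<name> -- which matches the systemd-quadlet-demo network created a few entries below. The deployed file content is not logged; a minimal sketch:

cat > /etc/containers/systemd/quadlet-demo.network <<'EOF'
[Network]
# no NetworkName= set, so the network is created as systemd-quadlet-demo
Label=app=quadlet-demo    # illustrative label, not taken from the log
EOF
systemctl daemon-reload && systemctl start quadlet-demo-network.service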
Jul 07 20:16:09 managed-node1 python3.9[76749]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jul 07 20:16:09 managed-node1 systemd[1]: Reloading.
Jul 07 20:16:09 managed-node1 quadlet-generator[76755]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:16:09 managed-node1 systemd-rc-local-generator[76767]: /etc/rc.d/rc.local is not marked executable, skipping.
Jul 07 20:16:09 managed-node1 python3.9[76932]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jul 07 20:16:09 managed-node1 systemd[1]: Starting quadlet-demo-network.service...
░░ Subject: A start job for unit quadlet-demo-network.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-network.service has begun execution.
░░
░░ The job identifier is 4397.
Jul 07 20:16:09 managed-node1 podman[76936]: 2025-07-07 20:16:09.813688266 -0400 EDT m=+0.023505174 network create e77a522f8940bc72322e47b9594fa31c2a9239c87a24a572992d50c2070722ac (name=systemd-quadlet-demo, type=bridge)
Jul 07 20:16:09 managed-node1 quadlet-demo-network[76936]: systemd-quadlet-demo
Jul 07 20:16:09 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:09 managed-node1 systemd[1]: Finished quadlet-demo-network.service.
░░ Subject: A start job for unit quadlet-demo-network.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-network.service has finished successfully.
░░
░░ The job identifier is 4397.
Jul 07 20:16:10 managed-node1 python3.9[77091]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:12 managed-node1 python3.9[77242]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:12 managed-node1 python3.9[77391]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:16:13 managed-node1 python3.9[77511]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933772.715237-18900-74746594643675/.source.volume dest=/etc/containers/systemd/quadlet-demo-mysql.volume owner=root group=0 mode=0644 _original_basename=quadlet-demo-mysql.volume follow=False checksum=585f8cbdf0ec73000f9227dcffbef71e9552ea4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
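The volume follows the same pattern: <name>.volume becomes <name>-volume.service and creates a podman volume named systemd-<name>, i.e. systemd-quadlet-demo-mysql below. Again the deployed content is not logged; a minimal sketch:

cat > /etc/containers/systemd/quadlet-demo-mysql.volume <<'EOF'
[Volume]
Label=app=quadlet-demo    # illustrative label, not taken from the log
EOF
systemctl daemon-reload && systemctl start quadlet-demo-mysql-volume.service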
Jul 07 20:16:13 managed-node1 python3.9[77660]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jul 07 20:16:13 managed-node1 systemd[1]: Reloading.
Jul 07 20:16:14 managed-node1 quadlet-generator[77666]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:16:14 managed-node1 systemd-rc-local-generator[77678]: /etc/rc.d/rc.local is not marked executable, skipping.
Jul 07 20:16:14 managed-node1 python3.9[77843]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jul 07 20:16:14 managed-node1 systemd[1]: Starting quadlet-demo-mysql-volume.service...
░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql-volume.service has begun execution.
░░
░░ The job identifier is 4468.
Jul 07 20:16:14 managed-node1 podman[77847]: 2025-07-07 20:16:14.758975109 -0400 EDT m=+0.027693599 volume create systemd-quadlet-demo-mysql
Jul 07 20:16:14 managed-node1 quadlet-demo-mysql-volume[77847]: systemd-quadlet-demo-mysql
Jul 07 20:16:14 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:14 managed-node1 systemd[1]: Finished quadlet-demo-mysql-volume.service.
░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql-volume.service has finished successfully.
░░
░░ The job identifier is 4468.
Jul 07 20:16:15 managed-node1 python3.9[78004]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:17 managed-node1 python3.9[78155]: ansible-file Invoked with path=/tmp/quadlet_demo state=directory owner=root group=root mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:17 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:23 managed-node1 podman[78312]: 2025-07-07 20:16:23.966027689 -0400 EDT m=+5.992362773 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6
Jul 07 20:16:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:24 managed-node1 python3.9[78638]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:24 managed-node1 python3.9[78787]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:16:25 managed-node1 python3.9[78907]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo-mysql.container owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933784.6711764-19256-243645638611479/.source.container _original_basename=.khut39a4 follow=False checksum=ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
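The container unit content is likewise unlogged. The entries that follow (image pull of quay.io/linux-system-roles/mysql:5.6, a container named quadlet-demo-mysql, the systemd-quadlet-demo bridge network, and a health-check timer) are consistent with a quadlet file roughly like this sketch; the exact keys, the secret name, and the health command are assumptions:

cat > /etc/containers/systemd/quadlet-demo-mysql.container <<'EOF'
[Install]
WantedBy=multi-user.target

[Container]
Image=quay.io/linux-system-roles/mysql:5.6
ContainerName=quadlet-demo-mysql
Volume=quadlet-demo-mysql.volume:/var/lib/mysql
Network=quadlet-demo.network
# hypothetical secret name, exposed to the container as an environment variable
Secret=mysql-root-password,type=env,target=MYSQL_ROOT_PASSWORD
HealthCmd=/bin/true    # placeholder; the real health command is not logged
EOF
systemctl daemon-reload && systemctl start quadlet-demo-mysql.service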
Jul 07 20:16:25 managed-node1 python3.9[79056]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jul 07 20:16:25 managed-node1 systemd[1]: Reloading.
Jul 07 20:16:25 managed-node1 quadlet-generator[79062]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:16:25 managed-node1 systemd-rc-local-generator[79074]: /etc/rc.d/rc.local is not marked executable, skipping.
Jul 07 20:16:26 managed-node1 python3.9[79239]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jul 07 20:16:26 managed-node1 systemd[1]: Starting quadlet-demo-mysql.service...
░░ Subject: A start job for unit quadlet-demo-mysql.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql.service has begun execution.
░░
░░ The job identifier is 4539.
Jul 07 20:16:26 managed-node1 podman[79243]: 2025-07-07 20:16:26.601319498 -0400 EDT m=+0.041646112 container create f33934011335e1e05e67f30d3833447877c8c3f24563f59e1979b85508143a46 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jul 07 20:16:26 managed-node1 systemd[1]: var-lib-containers-storage-overlay-volatile\x2dcheck3897929246-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay-volatile\x2dcheck3897929246-merged.mount has successfully entered the 'dead' state.
Jul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6259] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/9)
Jul 07 20:16:26 managed-node1 kernel: podman2: port 1(veth0) entered blocking state
Jul 07 20:16:26 managed-node1 kernel: podman2: port 1(veth0) entered disabled state
Jul 07 20:16:26 managed-node1 kernel: veth0: entered allmulticast mode
Jul 07 20:16:26 managed-node1 kernel: veth0: entered promiscuous mode
Jul 07 20:16:26 managed-node1 kernel: podman2: port 1(veth0) entered blocking state
Jul 07 20:16:26 managed-node1 kernel: podman2: port 1(veth0) entered forwarding state
Jul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6393] device (veth0): carrier: link connected
Jul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6396] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/10)
Jul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6400] device (podman2): carrier: link connected
Jul 07 20:16:26 managed-node1 systemd-udevd[79268]: Network interface NamePolicy= disabled on kernel command line.
Jul 07 20:16:26 managed-node1 systemd-udevd[79270]: Network interface NamePolicy= disabled on kernel command line.
Jul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6722] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Jul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6735] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')
Jul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6747] device (podman2): Activation: starting connection 'podman2' (5d5ba419-8b7f-4670-b0eb-b66c00c4f6b0)
Jul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6749] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external')
Jul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6756] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external')
Jul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6771] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external')
Jul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6775] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')
Jul 07 20:16:26 managed-node1 podman[79243]: 2025-07-07 20:16:26.585556736 -0400 EDT m=+0.025883528 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6
Jul 07 20:16:26 managed-node1 systemd[1]: Starting Network Manager Script Dispatcher Service...
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has begun execution.
░░
░░ The job identifier is 4613.
Jul 07 20:16:26 managed-node1 systemd[1]: Started Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░
░░ The job identifier is 4613.
Jul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.7052] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')
Jul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.7056] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external')
Jul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.7066] device (podman2): Activation: successful, device activated.
Jul 07 20:16:26 managed-node1 systemd[1]: Started /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run.
░░ Subject: A start job for unit run-r79a358c8821241c5893b4f6e7846886b.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit run-r79a358c8821241c5893b4f6e7846886b.scope has finished successfully.
░░
░░ The job identifier is 4679.
Jul 07 20:16:26 managed-node1 systemd[1]: Started /usr/bin/podman healthcheck run f33934011335e1e05e67f30d3833447877c8c3f24563f59e1979b85508143a46.
░░ Subject: A start job for unit f33934011335e1e05e67f30d3833447877c8c3f24563f59e1979b85508143a46-7e0e2196b6bede63.timer has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit f33934011335e1e05e67f30d3833447877c8c3f24563f59e1979b85508143a46-7e0e2196b6bede63.timer has finished successfully.
░░
░░ The job identifier is 4683.
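Because the container defines a health check, podman registers the transient timer above (named after the container ID); each firing runs the same check that can be invoked by hand:

podman healthcheck run quadlet-demo-mysql && echo healthy
podman ps --filter name=quadlet-demo-mysql --format '{{.Names}} {{.Status}}'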
Jul 07 20:16:26 managed-node1 podman[79243]: 2025-07-07 20:16:26.838601985 -0400 EDT m=+0.278928697 container init f33934011335e1e05e67f30d3833447877c8c3f24563f59e1979b85508143a46 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jul 07 20:16:26 managed-node1 systemd[1]: Started quadlet-demo-mysql.service.
░░ Subject: A start job for unit quadlet-demo-mysql.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql.service has finished successfully.
░░
░░ The job identifier is 4539.
Jul 07 20:16:26 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:26+00:00 [Note] [Entrypoint]: Entrypoint script for MySQL Server 5.6.51-1debian9 started.
Jul 07 20:16:26 managed-node1 podman[79243]: 2025-07-07 20:16:26.876531738 -0400 EDT m=+0.316858485 container start f33934011335e1e05e67f30d3833447877c8c3f24563f59e1979b85508143a46 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jul 07 20:16:26 managed-node1 quadlet-demo-mysql[79243]: f33934011335e1e05e67f30d3833447877c8c3f24563f59e1979b85508143a46
Jul 07 20:16:26 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:26+00:00 [Note] [Entrypoint]: Switching to dedicated user 'mysql'
Jul 07 20:16:26 managed-node1 podman[79339]: 2025-07-07 20:16:26.969487306 -0400 EDT m=+0.081760512 container health_status f33934011335e1e05e67f30d3833447877c8c3f24563f59e1979b85508143a46 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jul 07 20:16:26 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:26+00:00 [Note] [Entrypoint]: Entrypoint script for MySQL Server 5.6.51-1debian9 started.
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27+00:00 [Note] [Entrypoint]: Initializing database files
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 0 [Warning] TIMESTAMP with implicit DEFAULT value is deprecated. Please use --explicit_defaults_for_timestamp server option (see documentation for more details).
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 0 [Note] Ignoring --secure-file-priv value as server is running with --bootstrap.
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 0 [Note] /usr/sbin/mysqld (mysqld 5.6.51) starting as process 43 ...
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Using atomics to ref count buffer pool pages
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: The InnoDB memory heap is disabled
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Mutexes and rw_locks use GCC atomic builtins
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Memory barrier is not used
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Compressed tables use zlib 1.2.11
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Using Linux native AIO
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Using CPU crc32 instructions
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Initializing buffer pool, size = 128.0M
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Completed initialization of buffer pool
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: The first specified data file ./ibdata1 did not exist: a new database to be created!
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Setting file ./ibdata1 size to 12 MB
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Database physically writes the file full: wait...
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Setting log file ./ib_logfile101 size to 48 MB
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Setting log file ./ib_logfile1 size to 48 MB
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Renaming log file ./ib_logfile101 to ./ib_logfile0
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Warning] InnoDB: New log files created, LSN=45781
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Doublewrite buffer not found: creating new
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Doublewrite buffer created
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: 128 rollback segment(s) are active.
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Warning] InnoDB: Creating foreign key constraint system tables.
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Foreign key constraint system tables created
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Creating tablespace and datafile system tables.
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Tablespace and datafile system tables created.
Jul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Waiting for purge to start
Jul 07 20:16:27 managed-node1 python3.9[79551]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:28 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:28 43 [Note] InnoDB: 5.6.51 started; log sequence number 0
Jul 07 20:16:28 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:28 43 [Note] RSA private key file not found: /var/lib/mysql//private_key.pem. Some authentication plugins will not work.
Jul 07 20:16:28 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:28 43 [Note] RSA public key file not found: /var/lib/mysql//public_key.pem. Some authentication plugins will not work.
Jul 07 20:16:28 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:28 43 [Note] Binlog end
Jul 07 20:16:28 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:28 43 [Note] InnoDB: FTS optimize thread exiting.
Jul 07 20:16:28 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:28 43 [Note] InnoDB: Starting shutdown...
Jul 07 20:16:29 managed-node1 python3.9[79713]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 43 [Note] InnoDB: Shutdown completed; log sequence number 1625977
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 0 [Warning] TIMESTAMP with implicit DEFAULT value is deprecated. Please use --explicit_defaults_for_timestamp server option (see documentation for more details).
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 0 [Note] Ignoring --secure-file-priv value as server is running with --bootstrap.
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 0 [Note] /usr/sbin/mysqld (mysqld 5.6.51) starting as process 66 ...
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Using atomics to ref count buffer pool pages
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: The InnoDB memory heap is disabled
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Mutexes and rw_locks use GCC atomic builtins
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Memory barrier is not used
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Compressed tables use zlib 1.2.11
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Using Linux native AIO
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Using CPU crc32 instructions
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Initializing buffer pool, size = 128.0M
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Completed initialization of buffer pool
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Highest supported file format is Barracuda.
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: 128 rollback segment(s) are active.
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Waiting for purge to start
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: 5.6.51 started; log sequence number 1625977
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] RSA private key file not found: /var/lib/mysql//private_key.pem. Some authentication plugins will not work.
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] RSA public key file not found: /var/lib/mysql//public_key.pem. Some authentication plugins will not work.
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] Binlog end
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: FTS optimize thread exiting.
Jul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Starting shutdown...
Jul 07 20:16:30 managed-node1 python3.9[79886]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:16:30 managed-node1 python3.9[80006]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933789.8840272-19439-56658192497812/.source.yml dest=/etc/containers/systemd/envoy-proxy-configmap.yml owner=root group=0 mode=0644 _original_basename=envoy-proxy-configmap.yml follow=False checksum=d681c7d56f912150d041873e880818b22a90c188 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
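Only the destination path and checksum of envoy-proxy-configmap.yml are logged. The filename suggests a Kubernetes-style ConfigMap consumed by a later quadlet Kubernetes unit for the envoy proxy; purely as an illustration of the shape of such a file (every name and key below is an assumption):

cat > /etc/containers/systemd/envoy-proxy-configmap.yml <<'EOF'
apiVersion: v1
kind: ConfigMap
metadata:
  name: envoy-proxy-config
data:
  envoy.yaml: |
    # listener/cluster configuration for the envoy proxy would go here;
    # the real content is not present in this journal excerpt
EOF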
Jul 07 20:16:31 managed-node1 python3.9[80155]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jul 07 20:16:31 managed-node1 systemd[1]: Reloading.
Jul 07 20:16:31 managed-node1 quadlet-generator[80161]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:16:31 managed-node1 systemd-rc-local-generator[80173]: /etc/rc.d/rc.local is not marked executable, skipping.
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 66 [Note] InnoDB: Shutdown completed; log sequence number 1625987
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: PLEASE REMEMBER TO SET A PASSWORD FOR THE MySQL root USER !
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: To do so, start the server, then issue the following commands:
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: /usr/bin/mysqladmin -u root password 'new-password'
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: /usr/bin/mysqladmin -u root -h f33934011335 password 'new-password'
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: Alternatively you can run:
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: /usr/bin/mysql_secure_installation
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: which will also give you the option of removing the test
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: databases and anonymous user created by default. This is
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: strongly recommended for production servers.
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: See the manual for more instructions.
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: Please report any problems at http://bugs.mysql.com/
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: The latest information about MySQL is available on the web at
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: http://www.mysql.com
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: Support MySQL by buying support/licenses at http://shop.mysql.com
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: Note: new default config file not created.
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: Please make sure your config file is current
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: WARNING: Default config file /etc/mysql/my.cnf exists on the system
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: This file will be read by default by the MySQL server
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: If you do not want to use this, either remove it, or use the
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: --defaults-file argument to mysqld_safe when starting the server
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31+00:00 [Note] [Entrypoint]: Database files initialized
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31+00:00 [Note] [Entrypoint]: Starting temporary server
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31+00:00 [Note] [Entrypoint]: Waiting for server startup
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 0 [Warning] TIMESTAMP with implicit DEFAULT value is deprecated. Please use --explicit_defaults_for_timestamp server option (see documentation for more details).
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 0 [Note] mysqld (mysqld 5.6.51) starting as process 91 ...
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] Plugin 'FEDERATED' is disabled.
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: Using atomics to ref count buffer pool pages
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: The InnoDB memory heap is disabled
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: Mutexes and rw_locks use GCC atomic builtins
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: Memory barrier is not used
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: Compressed tables use zlib 1.2.11
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: Using Linux native AIO
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: Using CPU crc32 instructions
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: Initializing buffer pool, size = 128.0M
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: Completed initialization of buffer pool
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: Highest supported file format is Barracuda.
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: 128 rollback segment(s) are active.
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: Waiting for purge to start
Jul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32 91 [Note] InnoDB: 5.6.51 started; log sequence number 1625987
Jul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32 91 [Warning] No existing UUID has been found, so we assume that this is the first time that this server has been started. Generating a new UUID: ccf1f618-5b90-11f0-9d45-165e203fac44.
Jul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32 91 [Note] RSA private key file not found: /var/lib/mysql//private_key.pem. Some authentication plugins will not work.
Jul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32 91 [Note] RSA public key file not found: /var/lib/mysql//public_key.pem. Some authentication plugins will not work.
Jul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32 91 [Warning] Insecure configuration for --pid-file: Location '/var/run/mysqld' in the path is accessible to all OS users. Consider choosing a different directory.
Jul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32 91 [Warning] 'user' entry 'root@f33934011335' ignored in --skip-name-resolve mode.
Jul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32 91 [Warning] 'user' entry '@f33934011335' ignored in --skip-name-resolve mode.
Jul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32 91 [Warning] 'proxies_priv' entry '@ root@f33934011335' ignored in --skip-name-resolve mode.
Jul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32 91 [Note] Event Scheduler: Loaded 0 events
Jul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32 91 [Note] mysqld: ready for connections.
Jul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: Version: '5.6.51' socket: '/var/run/mysqld/mysqld.sock' port: 0 MySQL Community Server (GPL)
Jul 07 20:16:32 managed-node1 python3.9[80366]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32+00:00 [Note] [Entrypoint]: Temporary server started.
Jul 07 20:16:33 managed-node1 quadlet-demo-mysql[79332]: Warning: Unable to load '/usr/share/zoneinfo/iso3166.tab' as time zone. Skipping it.
Jul 07 20:16:33 managed-node1 quadlet-demo-mysql[79332]: Warning: Unable to load '/usr/share/zoneinfo/leap-seconds.list' as time zone. Skipping it.
Jul 07 20:16:33 managed-node1 quadlet-demo-mysql[79332]: Warning: Unable to load '/usr/share/zoneinfo/zone.tab' as time zone. Skipping it.
Jul 07 20:16:33 managed-node1 quadlet-demo-mysql[79332]: Warning: Unable to load '/usr/share/zoneinfo/zone1970.tab' as time zone. Skipping it.
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Warning] 'proxies_priv' entry '@ root@f33934011335' ignored in --skip-name-resolve mode.
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34+00:00 [Note] [Entrypoint]: Stopping temporary server
Jul 07 20:16:34 managed-node1 python3.9[80526]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] mysqld: Normal shutdown
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Giving 0 client threads a chance to die gracefully
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Event Scheduler: Purging the queue. 0 events
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down slave threads
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Forcefully disconnecting 0 remaining clients
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Binlog end
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'partition'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'PERFORMANCE_SCHEMA'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_SYS_DATAFILES'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_SYS_TABLESPACES'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_SYS_FOREIGN_COLS'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_SYS_FOREIGN'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_SYS_FIELDS'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_SYS_COLUMNS'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_SYS_INDEXES'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_SYS_TABLESTATS'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_SYS_TABLES'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_FT_INDEX_TABLE'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_FT_INDEX_CACHE'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_FT_CONFIG'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_FT_BEING_DELETED'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_FT_DELETED'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_FT_DEFAULT_STOPWORD'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_METRICS'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_BUFFER_POOL_STATS'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_BUFFER_PAGE_LRU'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_BUFFER_PAGE'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_CMP_PER_INDEX_RESET'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_CMP_PER_INDEX'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_CMPMEM_RESET'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_CMPMEM'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_CMP_RESET'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_CMP'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_LOCK_WAITS'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_LOCKS'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_TRX'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'InnoDB'
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] InnoDB: FTS optimize thread exiting.
Jul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] InnoDB: Starting shutdown...
Jul 07 20:16:34 managed-node1 python3.9[80682]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:16:34 managed-node1 python3.9[80802]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933794.3273475-19728-80933705754489/.source.yml _original_basename=.zg8_d8_6 follow=False checksum=998dccde0483b1654327a46ddd89cbaa47650370 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:35 managed-node1 python3.9[80951]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jul 07 20:16:35 managed-node1 systemd[1]: Reloading.
Jul 07 20:16:35 managed-node1 quadlet-generator[80957]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which is not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:16:35 managed-node1 systemd-rc-local-generator[80969]: /etc/rc.d/rc.local is not marked executable, skipping.
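The lines above show the deployment pattern the role uses: the Kubernetes YAML is copied into /etc/containers/systemd with mode 0644, then a systemd daemon-reload makes the quadlet generator re-run. A hand-rolled equivalent might look like the hedged sketch below; the role's real tasks, file sources, and variables are not visible in this log, so the src value is an assumption.

# Hedged sketch, not the role's actual task list.
- name: Install the quadlet Kubernetes YAML
  ansible.builtin.copy:
    src: quadlet-demo.yml            # assumed local file name
    dest: /etc/containers/systemd/quadlet-demo.yml
    owner: root
    group: "0"
    mode: "0644"

- name: Reload systemd so the quadlet generator re-runs
  ansible.builtin.systemd:
    daemon_reload: true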
Jul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] InnoDB: Shutdown completed; log sequence number 1625997
Jul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] Shutting down plugin 'BLACKHOLE'
Jul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] Shutting down plugin 'ARCHIVE'
Jul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] Shutting down plugin 'MRG_MYISAM'
Jul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] Shutting down plugin 'MyISAM'
Jul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] Shutting down plugin 'MEMORY'
Jul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] Shutting down plugin 'CSV'
Jul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] Shutting down plugin 'sha256_password'
Jul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] Shutting down plugin 'mysql_old_password'
Jul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] Shutting down plugin 'mysql_native_password'
Jul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] Shutting down plugin 'binlog'
Jul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] mysqld: Shutdown complete
Jul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36+00:00 [Note] [Entrypoint]: Temporary server stopped
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36+00:00 [Note] [Entrypoint]: MySQL init process done. Ready for start up.
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]:
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 0 [Warning] TIMESTAMP with implicit DEFAULT value is deprecated. Please use --explicit_defaults_for_timestamp server option (see documentation for more details).
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 0 [Note] mysqld (mysqld 5.6.51) starting as process 1 ...
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] Plugin 'FEDERATED' is disabled.
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: Using atomics to ref count buffer pool pages
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: The InnoDB memory heap is disabled
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: Mutexes and rw_locks use GCC atomic builtins
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: Memory barrier is not used
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: Compressed tables use zlib 1.2.11
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: Using Linux native AIO
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: Using CPU crc32 instructions
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: Initializing buffer pool, size = 128.0M
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: Completed initialization of buffer pool
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: Highest supported file format is Barracuda.
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: 128 rollback segment(s) are active.
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: Waiting for purge to start
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: 5.6.51 started; log sequence number 1625997
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] RSA private key file not found: /var/lib/mysql//private_key.pem. Some authentication plugins will not work.
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] RSA public key file not found: /var/lib/mysql//public_key.pem. Some authentication plugins will not work.
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] Server hostname (bind-address): '*'; port: 3306
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] IPv6 is available.
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] - '::' resolves to '::';
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] Server socket created on IP: '::'.
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Warning] Insecure configuration for --pid-file: Location '/var/run/mysqld' in the path is accessible to all OS users. Consider choosing a different directory.
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Warning] 'proxies_priv' entry '@ root@f33934011335' ignored in --skip-name-resolve mode.
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] Event Scheduler: Loaded 0 events
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] mysqld: ready for connections.
Jul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: Version: '5.6.51' socket: '/var/run/mysqld/mysqld.sock' port: 3306 MySQL Community Server (GPL)
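The MySQL image entrypoint visible above first runs a temporary server on port 0 to initialize the data directory, stops it, and only then starts the real server on 3306. If a playbook needed to gate later steps on that readiness, one option would be the hedged sketch below; it is not part of this test, and it assumes the port is reachable from the host, which this log does not confirm.

# Hedged sketch only; readiness gating is not something this test run does.
- name: Wait for quadlet-demo-mysql to accept connections
  ansible.builtin.wait_for:
    host: 127.0.0.1      # assumes 3306 is reachable on the host
    port: 3306
    delay: 2
    timeout: 120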
Jul 07 20:16:36 managed-node1 python3.9[81157]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:36 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
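The repeated ansible-stat calls against /usr/bin/getsubids (here and earlier in the run) are the role probing whether subordinate ID ranges can be queried, which matters for rootless podman. A standalone probe of the same shape could look like the hedged sketch below; the role's own variable names and follow-up logic are not shown in this log, and 'someuser' is a placeholder.

# Hedged sketch of the capability probe the log shows.
- name: Check whether getsubids is available
  ansible.builtin.stat:
    path: /usr/bin/getsubids
  register: getsubids_stat

- name: Query subuid ranges for a user (placeholder user name)
  ansible.builtin.command: getsubids someuser
  when: getsubids_stat.stat.exists
  changed_when: false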
Jul 07 20:16:37 managed-node1 python3.9[81308]: ansible-slurp Invoked with path=/etc/containers/systemd/quadlet-demo.yml src=/etc/containers/systemd/quadlet-demo.yml
Jul 07 20:16:38 managed-node1 python3.9[81457]: ansible-file Invoked with path=/tmp/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:38 managed-node1 python3.9[81606]: ansible-file Invoked with path=/tmp/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:50 managed-node1 podman[81763]: 2025-07-07 20:16:50.494372081 -0400 EDT m=+11.011831413 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache
Jul 07 20:16:54 managed-node1 podman[82201]: 2025-07-07 20:16:54.405201763 -0400 EDT m=+3.424988203 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0
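The two image pull events above show the wordpress and envoyproxy images being fetched before the quadlet service is started. A hedged sketch of pre-pulling them from Ansible is below; the collection module is a reasonable choice but this log does not show how the role actually performs the pull.

# Hedged sketch: pre-pulling the two demo images seen in the pull events above.
- name: Pre-pull the images used by the quadlet demo
  containers.podman.podman_image:
    name: "{{ item }}"
  loop:
    - quay.io/linux-system-roles/wordpress:4.8-apache
    - quay.io/linux-system-roles/envoyproxy:v1.25.0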
Jul 07 20:16:54 managed-node1 python3.9[82482]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:55 managed-node1 python3.9[82631]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:16:55 managed-node1 python3.9[82751]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933814.984563-20249-159053501176281/.source.kube dest=/etc/containers/systemd/quadlet-demo.kube owner=root group=0 mode=0644 _original_basename=quadlet-demo.kube follow=False checksum=7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:56 managed-node1 python3.9[82900]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jul 07 20:16:56 managed-node1 systemd[1]: Reloading.
Jul 07 20:16:56 managed-node1 quadlet-generator[82906]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which is not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:16:56 managed-node1 systemd-rc-local-generator[82917]: /etc/rc.d/rc.local is not marked executable, skipping.
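Here quadlet-demo.kube is installed next to the YAML and another daemon-reload lets quadlet generate quadlet-demo.service from it. The content of the real .kube file is not captured in this log; a minimal, purely illustrative unit might look like the sketch below, where the PublishPort value is an assumption based on the later check against https://localhost:8000.

# Hedged sketch only; not the quadlet-demo.kube used by this test run.
- name: Install an illustrative Kube quadlet unit
  ansible.builtin.copy:
    dest: /etc/containers/systemd/quadlet-demo.kube
    mode: "0644"
    content: |
      [Kube]
      Yaml=quadlet-demo.yml
      # Assumed mapping, inferred from the later https://localhost:8000 check
      PublishPort=8000:8080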
Jul 07 20:16:56 managed-node1 systemd[1]: Starting dnf makecache...
░░ Subject: A start job for unit dnf-makecache.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit dnf-makecache.service has begun execution.
░░
░░ The job identifier is 4813.
Jul 07 20:16:56 managed-node1 dnf[82935]: Failed determining last makecache time.
Jul 07 20:16:56 managed-node1 dnf[82935]: Beaker Client - RedHatEnterpriseLinux9 26 kB/s | 1.5 kB 00:00
Jul 07 20:16:56 managed-node1 dnf[82935]: Beaker harness 186 kB/s | 1.3 kB 00:00
Jul 07 20:16:56 managed-node1 dnf[82935]: Copr repo for beakerlib-libraries owned by bgon 15 kB/s | 1.8 kB 00:00
Jul 07 20:16:56 managed-node1 dnf[82935]: CentOS Stream 9 - BaseOS 107 kB/s | 6.7 kB 00:00
Jul 07 20:16:57 managed-node1 python3.9[83087]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jul 07 20:16:57 managed-node1 systemd[1]: Starting quadlet-demo.service...
░░ Subject: A start job for unit quadlet-demo.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo.service has begun execution.
░░
░░ The job identifier is 4943.
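The ansible-systemd invocation above starts the unit that quadlet generated from quadlet-demo.kube. A task mirroring that logged call would be:

# Hedged sketch mirroring the ansible-systemd call logged above.
- name: Start the generated quadlet-demo.service
  ansible.builtin.systemd:
    name: quadlet-demo.service
    scope: system
    state: started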
Jul 07 20:16:57 managed-node1 quadlet-demo[83099]: Pods stopped:
Jul 07 20:16:57 managed-node1 quadlet-demo[83099]: Pods removed:
Jul 07 20:16:57 managed-node1 quadlet-demo[83099]: Secrets removed:
Jul 07 20:16:57 managed-node1 quadlet-demo[83099]: Volumes removed:
Jul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.197476189 -0400 EDT m=+0.061646893 volume create wp-pv-claim
Jul 07 20:16:57 managed-node1 podman[83091]: 2025-07-07 20:16:57.207487227 -0400 EDT m=+0.103148810 container health_status f33934011335e1e05e67f30d3833447877c8c3f24563f59e1979b85508143a46 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Jul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.222721195 -0400 EDT m=+0.086891899 container create 5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042 (image=, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.232816142 -0400 EDT m=+0.096986817 volume create envoy-proxy-config
Jul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.239515168 -0400 EDT m=+0.103685845 volume create envoy-certificates
Jul 07 20:16:57 managed-node1 systemd[1]: Created slice cgroup machine-libpod_pod_b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f.slice.
░░ Subject: A start job for unit machine-libpod_pod_b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f.slice has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit machine-libpod_pod_b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f.slice has finished successfully.
░░
░░ The job identifier is 5017.
Jul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.277595719 -0400 EDT m=+0.141766395 container create fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa (image=, name=b57dd77843de-infra, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.283498672 -0400 EDT m=+0.147669648 pod create b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f (image=, name=quadlet-demo)
Jul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.321988353 -0400 EDT m=+0.186159035 container create 8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:16:57 managed-node1 dnf[82935]: CentOS Stream 9 - AppStream 23 kB/s | 6.8 kB 00:00
Jul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.349118944 -0400 EDT m=+0.213289676 container create f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.349489551 -0400 EDT m=+0.213660343 container restart 5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042 (image=, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.291559491 -0400 EDT m=+0.155730291 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache
Jul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.32463886 -0400 EDT m=+0.188809721 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0
Jul 07 20:16:57 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042.scope has finished successfully.
░░
░░ The job identifier is 5021.
Jul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.451993289 -0400 EDT m=+0.316164217 container init 5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042 (image=, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.458750698 -0400 EDT m=+0.322921489 container start 5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042 (image=, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:16:57 managed-node1 kernel: podman2: port 2(veth1) entered blocking state
Jul 07 20:16:57 managed-node1 kernel: podman2: port 2(veth1) entered disabled state
Jul 07 20:16:57 managed-node1 kernel: veth1: entered allmulticast mode
Jul 07 20:16:57 managed-node1 kernel: veth1: entered promiscuous mode
Jul 07 20:16:57 managed-node1 kernel: podman2: port 2(veth1) entered blocking state
Jul 07 20:16:57 managed-node1 kernel: podman2: port 2(veth1) entered forwarding state
Jul 07 20:16:57 managed-node1 NetworkManager[642]: <info>  [1751933817.4941] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/11)
Jul 07 20:16:57 managed-node1 NetworkManager[642]: <info>  [1751933817.4968] device (veth1): carrier: link connected
Jul 07 20:16:57 managed-node1 systemd-udevd[83134]: Network interface NamePolicy= disabled on kernel command line.
Jul 07 20:16:57 managed-node1 dnf[82935]: CentOS Stream 9 - HighAvailability 99 kB/s | 7.1 kB 00:00
Jul 07 20:16:57 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa.scope has finished successfully.
░░
░░ The job identifier is 5026.
Jul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.630340964 -0400 EDT m=+0.494511760 container init fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa (image=, name=b57dd77843de-infra, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.633530268 -0400 EDT m=+0.497701060 container start fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa (image=, name=b57dd77843de-infra, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:16:57 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5.scope has finished successfully.
░░
░░ The job identifier is 5031.
Jul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.680011486 -0400 EDT m=+0.544182268 container init 8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.684449747 -0400 EDT m=+0.548620577 container start 8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:16:57 managed-node1 quadlet-demo-wordpress[83203]: WordPress not found in /var/www/html - copying now...
Jul 07 20:16:57 managed-node1 dnf[82935]: CentOS Stream 9 - Extras packages 46 kB/s | 7.3 kB 00:00
Jul 07 20:16:57 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad.scope has finished successfully.
░░
░░ The job identifier is 5036.
Jul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.786067539 -0400 EDT m=+0.650238481 container init f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.793179398 -0400 EDT m=+0.657350165 container start f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.801305069 -0400 EDT m=+0.665475779 pod start b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f (image=, name=quadlet-demo)
Jul 07 20:16:57 managed-node1 systemd[1]: Started quadlet-demo.service.
░░ Subject: A start job for unit quadlet-demo.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo.service has finished successfully.
░░
░░ The job identifier is 4943.
Jul 07 20:16:57 managed-node1 quadlet-demo[83099]: Volumes:
Jul 07 20:16:57 managed-node1 quadlet-demo[83099]: wp-pv-claim
Jul 07 20:16:57 managed-node1 quadlet-demo[83099]: Pod:
Jul 07 20:16:57 managed-node1 quadlet-demo[83099]: b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f
Jul 07 20:16:57 managed-node1 quadlet-demo[83099]: Containers:
Jul 07 20:16:57 managed-node1 quadlet-demo[83099]: 8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5
Jul 07 20:16:57 managed-node1 quadlet-demo[83099]: f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad
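The summary above shows what the Kube unit created: one pod (quadlet-demo) with an infra container plus the wordpress and envoy containers, and a wp-pv-claim volume. A manifest producing roughly that shape is sketched below for illustration only; the real quadlet-demo.yml is not reproduced in this log, and the storage size, mount path, and PVC wiring are assumptions (the /var/www/html path is inferred from the WordPress copy message above).

# Illustrative reconstruction only; not the manifest used by this test run.
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: wp-pv-claim
spec:
  accessModes: [ReadWriteOnce]
  resources:
    requests:
      storage: 1Gi          # assumed size
---
apiVersion: v1
kind: Pod
metadata:
  name: quadlet-demo
spec:
  containers:
    - name: wordpress
      image: quay.io/linux-system-roles/wordpress:4.8-apache
      volumeMounts:
        - name: wordpress-data
          mountPath: /var/www/html   # assumed mount path
    - name: envoy
      image: quay.io/linux-system-roles/envoyproxy:v1.25.0
  volumes:
    - name: wordpress-data
      persistentVolumeClaim:
        claimName: wp-pv-claim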
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:404] initializing epoch 0 (base id=0, hot restart version=11.104)
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:406] statically linked extensions:
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.stats_sinks: envoy.dog_statsd, envoy.graphite_statsd, envoy.metrics_service, envoy.stat_sinks.dog_statsd, envoy.stat_sinks.graphite_statsd, envoy.stat_sinks.hystrix, envoy.stat_sinks.metrics_service, envoy.stat_sinks.statsd, envoy.stat_sinks.wasm, envoy.statsd
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.tls.cert_validator: envoy.tls.cert_validator.default, envoy.tls.cert_validator.spiffe
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.listener_manager_impl: envoy.listener_manager_impl.default
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.matching.http.custom_matchers: envoy.matching.custom_matchers.trie_matcher
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.filters.network: envoy.echo, envoy.ext_authz, envoy.filters.network.connection_limit, envoy.filters.network.direct_response, envoy.filters.network.dubbo_proxy, envoy.filters.network.echo, envoy.filters.network.ext_authz, envoy.filters.network.http_connection_manager, envoy.filters.network.local_ratelimit, envoy.filters.network.mongo_proxy, envoy.filters.network.ratelimit, envoy.filters.network.rbac, envoy.filters.network.redis_proxy, envoy.filters.network.sni_cluster, envoy.filters.network.sni_dynamic_forward_proxy, envoy.filters.network.tcp_proxy, envoy.filters.network.thrift_proxy, envoy.filters.network.wasm, envoy.filters.network.zookeeper_proxy, envoy.http_connection_manager, envoy.mongo_proxy, envoy.ratelimit, envoy.redis_proxy, envoy.tcp_proxy
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.upstreams: envoy.filters.connection_pools.tcp.generic
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.access_loggers.extension_filters: envoy.access_loggers.extension_filters.cel
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.filters.http.upstream: envoy.buffer, envoy.filters.http.admission_control, envoy.filters.http.buffer, envoy.filters.http.upstream_codec
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.http.stateful_header_formatters: envoy.http.stateful_header_formatters.preserve_case, preserve_case
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.dubbo_proxy.protocols: dubbo
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.quic.server.crypto_stream: envoy.quic.crypto_stream.server.quiche
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.route.early_data_policy: envoy.route.early_data_policy.default
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.guarddog_actions: envoy.watchdog.abort_action, envoy.watchdog.profile_action
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.config.validators: envoy.config.validators.minimum_clusters, envoy.config.validators.minimum_clusters_validator
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.matching.http.input: envoy.matching.inputs.destination_ip, envoy.matching.inputs.destination_port, envoy.matching.inputs.direct_source_ip, envoy.matching.inputs.dns_san, envoy.matching.inputs.request_headers, envoy.matching.inputs.request_trailers, envoy.matching.inputs.response_headers, envoy.matching.inputs.response_trailers, envoy.matching.inputs.server_name, envoy.matching.inputs.source_ip, envoy.matching.inputs.source_port, envoy.matching.inputs.source_type, envoy.matching.inputs.status_code_class_input, envoy.matching.inputs.status_code_input, envoy.matching.inputs.subject, envoy.matching.inputs.uri_san
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.quic.connection_id_generator: envoy.quic.deterministic_connection_id_generator
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.filters.listener: envoy.filters.listener.http_inspector, envoy.filters.listener.original_dst, envoy.filters.listener.original_src, envoy.filters.listener.proxy_protocol, envoy.filters.listener.tls_inspector, envoy.listener.http_inspector, envoy.listener.original_dst, envoy.listener.original_src, envoy.listener.proxy_protocol, envoy.listener.tls_inspector
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.filters.udp_listener: envoy.filters.udp.dns_filter, envoy.filters.udp_listener.udp_proxy
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.rate_limit_descriptors: envoy.rate_limit_descriptors.expr
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.path.match: envoy.path.match.uri_template.uri_template_matcher
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.thrift_proxy.transports: auto, framed, header, unframed
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.request_id: envoy.request_id.uuid
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.http.stateful_session: envoy.http.stateful_session.cookie, envoy.http.stateful_session.header
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.matching.network.custom_matchers: envoy.matching.custom_matchers.trie_matcher
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.load_balancing_policies: envoy.load_balancing_policies.least_request, envoy.load_balancing_policies.random, envoy.load_balancing_policies.round_robin
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.http.early_header_mutation: envoy.http.early_header_mutation.header_mutation
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.udp_packet_writer: envoy.udp_packet_writer.default, envoy.udp_packet_writer.gso
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.resource_monitors: envoy.resource_monitors.fixed_heap, envoy.resource_monitors.injected_resource
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.network.dns_resolver: envoy.network.dns_resolver.cares, envoy.network.dns_resolver.getaddrinfo
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.health_checkers: envoy.health_checkers.redis, envoy.health_checkers.thrift
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] quic.http_server_connection: quic.http_server_connection.default
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.formatter: envoy.formatter.metadata, envoy.formatter.req_without_query
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.regex_engines: envoy.regex_engines.google_re2
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.matching.input_matchers: envoy.matching.matchers.consistent_hashing, envoy.matching.matchers.ip
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.compression.decompressor: envoy.compression.brotli.decompressor, envoy.compression.gzip.decompressor, envoy.compression.zstd.decompressor
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.853][1][info][main] [source/server/server.cc:408] envoy.upstream_options: envoy.extensions.upstreams.http.v3.HttpProtocolOptions, envoy.extensions.upstreams.tcp.v3.TcpProtocolOptions, envoy.upstreams.http.http_protocol_options, envoy.upstreams.tcp.tcp_protocol_options
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.853][1][info][main] [source/server/server.cc:408] envoy.resolvers: envoy.ip
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.853][1][info][main] [source/server/server.cc:408] envoy.path.rewrite: envoy.path.rewrite.uri_template.uri_template_rewriter
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.853][1][info][main] [source/server/server.cc:408] envoy.bootstrap: envoy.bootstrap.internal_listener, envoy.bootstrap.wasm, envoy.extensions.network.socket_interface.default_socket_interface
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.854][1][info][main] [source/server/server.cc:408] envoy.transport_sockets.downstream: envoy.transport_sockets.alts, envoy.transport_sockets.quic, envoy.transport_sockets.raw_buffer, envoy.transport_sockets.starttls, envoy.transport_sockets.tap, envoy.transport_sockets.tcp_stats, envoy.transport_sockets.tls, raw_buffer, starttls, tls
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.854][1][info][main] [source/server/server.cc:408] envoy.dubbo_proxy.serializers: dubbo.hessian2
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.854][1][info][main] [source/server/server.cc:408] envoy.matching.action: envoy.matching.actions.format_string, filter-chain-name
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.854][1][info][main] [source/server/server.cc:408] envoy.grpc_credentials: envoy.grpc_credentials.aws_iam, envoy.grpc_credentials.default, envoy.grpc_credentials.file_based_metadata
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.854][1][info][main] [source/server/server.cc:408] envoy.retry_priorities: envoy.retry_priorities.previous_priorities
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.855][1][info][main] [source/server/server.cc:408] envoy.filters.http: envoy.bandwidth_limit, envoy.buffer, envoy.cors, envoy.csrf, envoy.ext_authz, envoy.ext_proc, envoy.fault, envoy.filters.http.adaptive_concurrency, envoy.filters.http.admission_control, envoy.filters.http.alternate_protocols_cache, envoy.filters.http.aws_lambda, envoy.filters.http.aws_request_signing, envoy.filters.http.bandwidth_limit, envoy.filters.http.buffer, envoy.filters.http.cache, envoy.filters.http.cdn_loop, envoy.filters.http.composite, envoy.filters.http.compressor, envoy.filters.http.cors, envoy.filters.http.csrf, envoy.filters.http.custom_response, envoy.filters.http.decompressor, envoy.filters.http.dynamic_forward_proxy, envoy.filters.http.ext_authz, envoy.filters.http.ext_proc, envoy.filters.http.fault, envoy.filters.http.file_system_buffer, envoy.filters.http.gcp_authn, envoy.filters.http.grpc_http1_bridge, envoy.filters.http.grpc_http1_reverse_bridge, envoy.filters.http.grpc_json_transcoder, envoy.filters.http.grpc_stats, envoy.filters.http.grpc_web, envoy.filters.http.header_to_metadata, envoy.filters.http.health_check, envoy.filters.http.ip_tagging, envoy.filters.http.jwt_authn, envoy.filters.http.local_ratelimit, envoy.filters.http.lua, envoy.filters.http.match_delegate, envoy.filters.http.oauth2, envoy.filters.http.on_demand, envoy.filters.http.original_src, envoy.filters.http.rate_limit_quota, envoy.filters.http.ratelimit, envoy.filters.http.rbac, envoy.filters.http.router, envoy.filters.http.set_metadata, envoy.filters.http.stateful_session, envoy.filters.http.tap, envoy.filters.http.wasm, envoy.grpc_http1_bridge, envoy.grpc_json_transcoder, envoy.grpc_web, envoy.health_check, envoy.ip_tagging, envoy.local_rate_limit, envoy.lua, envoy.rate_limit, envoy.router
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.retry_host_predicates: envoy.retry_host_predicates.omit_canary_hosts, envoy.retry_host_predicates.omit_host_metadata, envoy.retry_host_predicates.previous_hosts
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.http.cache: envoy.extensions.http.cache.file_system_http_cache, envoy.extensions.http.cache.simple
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.wasm.runtime: envoy.wasm.runtime.null, envoy.wasm.runtime.v8
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.common.key_value: envoy.key_value.file_based
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.quic.proof_source: envoy.quic.proof_source.filter_chain
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.access_loggers: envoy.access_loggers.file, envoy.access_loggers.http_grpc, envoy.access_loggers.open_telemetry, envoy.access_loggers.stderr, envoy.access_loggers.stdout, envoy.access_loggers.tcp_grpc, envoy.access_loggers.wasm, envoy.file_access_log, envoy.http_grpc_access_log, envoy.open_telemetry_access_log, envoy.stderr_access_log, envoy.stdout_access_log, envoy.tcp_grpc_access_log, envoy.wasm_access_log
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.http.original_ip_detection: envoy.http.original_ip_detection.custom_header, envoy.http.original_ip_detection.xff
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.internal_redirect_predicates: envoy.internal_redirect_predicates.allow_listed_routes, envoy.internal_redirect_predicates.previous_routes, envoy.internal_redirect_predicates.safe_cross_scheme
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.connection_handler: envoy.connection_handler.default
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.thrift_proxy.protocols: auto, binary, binary/non-strict, compact, twitter
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.http.header_validators: envoy.http.header_validators.envoy_default
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.matching.network.input: envoy.matching.inputs.application_protocol, envoy.matching.inputs.destination_ip, envoy.matching.inputs.destination_port, envoy.matching.inputs.direct_source_ip, envoy.matching.inputs.dns_san, envoy.matching.inputs.server_name, envoy.matching.inputs.source_ip, envoy.matching.inputs.source_port, envoy.matching.inputs.source_type, envoy.matching.inputs.subject, envoy.matching.inputs.transport_protocol, envoy.matching.inputs.uri_san
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.matching.common_inputs: envoy.matching.common_inputs.environment_variable
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] network.connection.client: default, envoy_internal
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.tracers: envoy.dynamic.ot, envoy.tracers.datadog, envoy.tracers.dynamic_ot, envoy.tracers.opencensus, envoy.tracers.opentelemetry, envoy.tracers.skywalking, envoy.tracers.xray, envoy.tracers.zipkin, envoy.zipkin
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.transport_sockets.upstream: envoy.transport_sockets.alts, envoy.transport_sockets.http_11_proxy, envoy.transport_sockets.internal_upstream, envoy.transport_sockets.quic, envoy.transport_sockets.raw_buffer, envoy.transport_sockets.starttls, envoy.transport_sockets.tap, envoy.transport_sockets.tcp_stats, envoy.transport_sockets.tls, envoy.transport_sockets.upstream_proxy_protocol, raw_buffer, starttls, tls
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.thrift_proxy.filters: envoy.filters.thrift.header_to_metadata, envoy.filters.thrift.payload_to_metadata, envoy.filters.thrift.rate_limit, envoy.filters.thrift.router
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.compression.compressor: envoy.compression.brotli.compressor, envoy.compression.gzip.compressor, envoy.compression.zstd.compressor
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.clusters: envoy.cluster.eds, envoy.cluster.logical_dns, envoy.cluster.original_dst, envoy.cluster.static, envoy.cluster.strict_dns, envoy.clusters.aggregate, envoy.clusters.dynamic_forward_proxy, envoy.clusters.redis
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.http.custom_response: envoy.extensions.http.custom_response.local_response_policy, envoy.extensions.http.custom_response.redirect_policy
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.dubbo_proxy.filters: envoy.filters.dubbo.router
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.rbac.matchers: envoy.rbac.matchers.upstream_ip_port
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.862][1][info][main] [source/server/server.cc:456] HTTP header map info:
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.864][1][info][main] [source/server/server.cc:459] request header map: 672 bytes: :authority,:method,:path,:protocol,:scheme,accept,accept-encoding,access-control-request-headers,access-control-request-method,access-control-request-private-network,authentication,authorization,cache-control,cdn-loop,connection,content-encoding,content-length,content-type,expect,grpc-accept-encoding,grpc-timeout,if-match,if-modified-since,if-none-match,if-range,if-unmodified-since,keep-alive,origin,pragma,proxy-connection,proxy-status,referer,te,transfer-encoding,upgrade,user-agent,via,x-client-trace-id,x-envoy-attempt-count,x-envoy-decorator-operation,x-envoy-downstream-service-cluster,x-envoy-downstream-service-node,x-envoy-expected-rq-timeout-ms,x-envoy-external-address,x-envoy-force-trace,x-envoy-hedge-on-per-try-timeout,x-envoy-internal,x-envoy-ip-tags,x-envoy-is-timeout-retry,x-envoy-max-retries,x-envoy-original-path,x-envoy-original-url,x-envoy-retriable-header-names,x-envoy-retriable-status-codes,x-envoy-retry-grpc-on,x-envoy-retry-on,x-envoy-upstream-alt-stat-name,x-envoy-upstream-rq-per-try-timeout-ms,x-envoy-upstream-rq-timeout-alt-response,x-envoy-upstream-rq-timeout-ms,x-envoy-upstream-stream-duration-ms,x-forwarded-client-cert,x-forwarded-for,x-forwarded-host,x-forwarded-port,x-forwarded-proto,x-ot-span-context,x-request-id
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.864][1][info][main] [source/server/server.cc:459] request trailer map: 120 bytes:
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.864][1][info][main] [source/server/server.cc:459] response header map: 432 bytes: :status,access-control-allow-credentials,access-control-allow-headers,access-control-allow-methods,access-control-allow-origin,access-control-allow-private-network,access-control-expose-headers,access-control-max-age,age,cache-control,connection,content-encoding,content-length,content-type,date,etag,expires,grpc-message,grpc-status,keep-alive,last-modified,location,proxy-connection,proxy-status,server,transfer-encoding,upgrade,vary,via,x-envoy-attempt-count,x-envoy-decorator-operation,x-envoy-degraded,x-envoy-immediate-health-check-fail,x-envoy-ratelimited,x-envoy-upstream-canary,x-envoy-upstream-healthchecked-cluster,x-envoy-upstream-service-time,x-request-id
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.864][1][info][main] [source/server/server.cc:459] response trailer map: 144 bytes: grpc-message,grpc-status
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.870][1][info][main] [source/server/server.cc:819] runtime: {}
Jul 07 20:16:57 managed-node1 dnf[82935]: Extra Packages for Enterprise Linux 9 - x86_64 235 kB/s | 34 kB 00:00
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.887][1][info][admin] [source/server/admin/admin.cc:67] admin address: 0.0.0.0:9901
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.888][1][info][config] [source/server/configuration_impl.cc:131] loading tracing configuration
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.888][1][info][config] [source/server/configuration_impl.cc:91] loading 0 static secret(s)
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.888][1][info][config] [source/server/configuration_impl.cc:97] loading 1 cluster(s)
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.902][1][info][config] [source/server/configuration_impl.cc:101] loading 1 listener(s)
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.918][1][info][config] [source/server/configuration_impl.cc:113] loading stats configuration
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.918][1][info][runtime] [source/common/runtime/runtime_impl.cc:463] RTDS has finished initialization
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.919][1][info][upstream] [source/common/upstream/cluster_manager_impl.cc:226] cm init: all clusters initialized
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.919][1][warning][main] [source/server/server.cc:794] there is no configured limit to the number of allowed active connections. Set a limit via the runtime key overload.global_downstream_max_connections
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.922][1][info][main] [source/server/server.cc:896] all clusters initialized. initializing init manager
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.922][1][info][config] [source/extensions/listener_managers/listener_manager/listener_manager_impl.cc:852] all dependencies initialized. starting workers
Jul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.923][1][info][main] [source/server/server.cc:915] starting main dispatch loop
Jul 07 20:16:57 managed-node1 quadlet-demo-wordpress[83203]: Complete! WordPress has been successfully copied to /var/www/html
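Envoy's startup log above reports its admin interface on 0.0.0.0:9901 and exactly one cluster and one listener loaded from the config provided to the container. A minimal bootstrap with that shape is sketched below purely for illustration; it is not the demo's real configuration (its TLS setup is omitted, and the listener port, cluster name, and upstream address are assumptions).

# Illustrative minimal Envoy bootstrap: one listener, one cluster, admin on 9901.
admin:
  address:
    socket_address: { address: 0.0.0.0, port_value: 9901 }
static_resources:
  listeners:
    - name: web
      address:
        socket_address: { address: 0.0.0.0, port_value: 8080 }   # assumed port
      filter_chains:
        - filters:
            - name: envoy.filters.network.http_connection_manager
              typed_config:
                "@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager
                stat_prefix: ingress_http
                route_config:
                  virtual_hosts:
                    - name: wordpress
                      domains: ["*"]
                      routes:
                        - match: { prefix: "/" }
                          route: { cluster: wordpress }
                http_filters:
                  - name: envoy.filters.http.router
                    typed_config:
                      "@type": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router
  clusters:
    - name: wordpress
      type: STRICT_DNS
      connect_timeout: 5s
      load_assignment:
        cluster_name: wordpress
        endpoints:
          - lb_endpoints:
              - endpoint:
                  address:
                    socket_address: { address: 127.0.0.1, port_value: 80 }   # assumed upstream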
Jul 07 20:16:58 managed-node1 python3.9[83469]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/containers/systemd _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:16:59 managed-node1 dnf[82935]: Extra Packages for Enterprise Linux 9 - x86_64 16 MB/s | 20 MB 00:01
Jul 07 20:16:59 managed-node1 quadlet-demo-wordpress[83203]: AH00558: apache2: Could not reliably determine the server's fully qualified domain name, using 192.168.30.3. Set the 'ServerName' directive globally to suppress this message
Jul 07 20:16:59 managed-node1 quadlet-demo-wordpress[83203]: AH00558: apache2: Could not reliably determine the server's fully qualified domain name, using 192.168.30.3. Set the 'ServerName' directive globally to suppress this message
Jul 07 20:16:59 managed-node1 quadlet-demo-wordpress[83203]: [Tue Jul 08 00:16:59.357593 2025] [mpm_prefork:notice] [pid 1] AH00163: Apache/2.4.10 (Debian) PHP/5.6.32 configured -- resuming normal operations
Jul 07 20:16:59 managed-node1 quadlet-demo-wordpress[83203]: [Tue Jul 08 00:16:59.360135 2025] [core:notice] [pid 1] AH00094: Command line: 'apache2 -D FOREGROUND'
Jul 07 20:16:59 managed-node1 python3.9[83699]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:16:59 managed-node1 dnf[82935]: Extra Packages for Enterprise Linux 9 - x86_64 69 kB/s | 21 kB 00:00
Jul 07 20:16:59 managed-node1 python3.9[83873]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:00 managed-node1 dnf[82935]: Extra Packages for Enterprise Linux 9 - x86_64 229 kB/s | 23 kB 00:00
Jul 07 20:17:00 managed-node1 dnf[82935]: Extra Packages for Enterprise Linux 9 - x86_64 14 MB/s | 4.0 MB 00:00
Jul 07 20:17:00 managed-node1 python3.9[84035]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:00 managed-node1 python3.9[84192]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:01 managed-node1 python3.9[84344]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:17:01 managed-node1 quadlet-demo-wordpress[83203]: 127.0.0.1 - - [08/Jul/2025:00:17:01 +0000] "GET / HTTP/1.1" 302 324 "-" "ansible-httpget"
Jul 07 20:17:02 managed-node1 quadlet-demo-wordpress[83203]: 127.0.0.1 - - [08/Jul/2025:00:17:01 +0000] "GET /wp-admin/install.php HTTP/1.1" 200 11984 "-" "ansible-httpget"
Jul 07 20:17:03 managed-node1 python3.9[84495]: ansible-ansible.legacy.command Invoked with _raw_params=cat /run/out _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
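The two requests logged by quadlet-demo-wordpress above come from this check: get_url fetches https://localhost:8000 with certificate validation disabled, follows the redirect to /wp-admin/install.php, and writes the body to /run/out, which is then printed. A minimal shell sketch of the same check, assuming the envoy proxy is still listening on localhost:8000 with its self-signed certificate:

    curl -ksSL -o /run/out https://localhost:8000   # -k: skip TLS verification, -L: follow the 302 redirect
    cat /run/out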
Jul 07 20:17:03 managed-node1 python3.9[84645]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:03 managed-node1 dnf[82935]: Extra Packages for Enterprise Linux 9 openh264 6.0 kB/s | 993 B 00:00
Jul 07 20:17:03 managed-node1 dnf[82935]: Copr repo for qa-tools owned by lpol 32 kB/s | 1.8 kB 00:00
Jul 07 20:17:04 managed-node1 python3.9[84804]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:04 managed-node1 python3.9[84961]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units --all | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:05 managed-node1 python3.9[85113]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/systemd/system _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
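The commands invoked above take a snapshot of the deployed state; the same survey can be reproduced by hand on the managed node, for example:

    podman ps -a
    podman volume ls
    podman pod ps --ctr-ids --ctr-names --ctr-status
    systemctl list-units --all | grep quadlet
    ls -alrtF /etc/containers/systemd /etc/systemd/system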
Jul 07 20:17:06 managed-node1 dnf[82935]: Metadata cache created.
Jul 07 20:17:06 managed-node1 systemd[1]: dnf-makecache.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit dnf-makecache.service has successfully entered the 'dead' state.
Jul 07 20:17:06 managed-node1 systemd[1]: Finished dnf makecache.
░░ Subject: A start job for unit dnf-makecache.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit dnf-makecache.service has finished successfully.
░░
░░ The job identifier is 4813.
Jul 07 20:17:06 managed-node1 systemd[1]: dnf-makecache.service: Consumed 7.664s CPU time.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit dnf-makecache.service completed and consumed the indicated resources.
Jul 07 20:17:07 managed-node1 python3.9[85412]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:08 managed-node1 python3.9[85567]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:17:11 managed-node1 python3.9[85718]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Jul 07 20:17:14 managed-node1 python3.9[85868]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Jul 07 20:17:15 managed-node1 python3.9[86019]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Jul 07 20:17:15 managed-node1 python3.9[86170]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Jul 07 20:17:16 managed-node1 python3.9[86319]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
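The two firewall_lib invocations above open 8000/tcp and 9000/tcp in both the permanent and runtime firewalld configuration. A rough firewall-cmd equivalent in the default zone (a sketch, not the role's actual implementation):

    firewall-cmd --permanent --add-port=8000/tcp
    firewall-cmd --permanent --add-port=9000/tcp
    firewall-cmd --reload    # load the permanent rules into the runtime configuration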
Jul 07 20:17:18 managed-node1 podman[86469]: 2025-07-07 20:17:18.711105559 -0400 EDT m=+0.019785469 secret remove ea8c34f75eeeee13a33c9ef0e
Jul 07 20:17:19 managed-node1 podman[86626]: 2025-07-07 20:17:19.995189735 -0400 EDT m=+0.018739940 secret remove cb2d4c65044c524adeead96e5
Jul 07 20:17:21 managed-node1 podman[86782]: 2025-07-07 20:17:21.222445829 -0400 EDT m=+0.020468808 secret remove a95acde29e8944984a9c6b05c
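The three "secret remove" events above are the role deleting the podman secrets created earlier in the test. The same cleanup can be done manually with the IDs shown in the journal:

    podman secret ls
    podman secret rm ea8c34f75eeeee13a33c9ef0e cb2d4c65044c524adeead96e5 a95acde29e8944984a9c6b05c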
Jul 07 20:17:22 managed-node1 python3.9[86938]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:17:23 managed-node1 python3.9[87089]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Jul 07 20:17:23 managed-node1 systemd[1]: Reloading.
Jul 07 20:17:23 managed-node1 quadlet-generator[87097]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which is not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
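The generator warning comes from a leftover bogus.container unit whose Image= value is not a fully qualified reference (registry/namespace/name:tag). To see what the quadlet generator produces, including warnings like this one, without a full systemd reload, it can be run directly; the path and --dryrun flag below follow podman-systemd.unit(5) and may differ on other builds:

    /usr/lib/systemd/system-generators/podman-system-generator --dryrun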
Jul 07 20:17:23 managed-node1 systemd-rc-local-generator[87109]: /etc/rc.d/rc.local is not marked executable, skipping.
Jul 07 20:17:23 managed-node1 systemd[1]: Stopping quadlet-demo.service...
░░ Subject: A stop job for unit quadlet-demo.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit quadlet-demo.service has begun execution.
░░
░░ The job identifier is 5041.
Jul 07 20:17:23 managed-node1 systemd[1]: libpod-5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042.scope has successfully entered the 'dead' state.
Jul 07 20:17:23 managed-node1 podman[87128]: 2025-07-07 20:17:23.586690926 -0400 EDT m=+0.025121505 container died 5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042 (image=, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:17:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042-rootfs-merge.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay\x2dcontainers-5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042-rootfs-merge.mount has successfully entered the 'dead' state.
Jul 07 20:17:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042-userdata-shm.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay\x2dcontainers-5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042-userdata-shm.mount has successfully entered the 'dead' state.
Jul 07 20:17:24 managed-node1 podman[87128]: 2025-07-07 20:17:24.598463702 -0400 EDT m=+1.036894323 container cleanup 5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042 (image=, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:17:24 managed-node1 podman[87138]: 2025-07-07 20:17:24.64198547 -0400 EDT m=+0.026141643 pod stop b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f (image=, name=quadlet-demo)
Jul 07 20:17:24 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:17:24.669][1][warning][main] [source/server/server.cc:854] caught ENVOY_SIGTERM
Jul 07 20:17:24 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:17:24.670][1][info][main] [source/server/server.cc:985] shutting down server instance
Jul 07 20:17:24 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:17:24.670][1][info][main] [source/server/server.cc:920] main dispatch loop exited
Jul 07 20:17:24 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:17:24.670][1][info][main] [source/server/server.cc:972] exiting
Jul 07 20:17:24 managed-node1 systemd[1]: libpod-f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad.scope has successfully entered the 'dead' state.
Jul 07 20:17:24 managed-node1 podman[87138]: 2025-07-07 20:17:24.680641899 -0400 EDT m=+0.064798159 container died f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:17:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay-385b8e73dfdfcde23ee0e5b8bf415127191cfc5d4323574c93bc4e97979eab2d-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay-385b8e73dfdfcde23ee0e5b8bf415127191cfc5d4323574c93bc4e97979eab2d-merged.mount has successfully entered the 'dead' state.
Jul 07 20:17:24 managed-node1 podman[87138]: 2025-07-07 20:17:24.713081174 -0400 EDT m=+0.097237436 container cleanup f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:17:24 managed-node1 quadlet-demo-wordpress[83203]: [Tue Jul 08 00:17:24.732426 2025] [mpm_prefork:notice] [pid 1] AH00169: caught SIGTERM, shutting down
Jul 07 20:17:24 managed-node1 systemd[1]: libpod-8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5.scope has successfully entered the 'dead' state.
Jul 07 20:17:24 managed-node1 systemd[1]: libpod-8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5.scope: Consumed 1.319s CPU time.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5.scope completed and consumed the indicated resources.
Jul 07 20:17:24 managed-node1 podman[87138]: 2025-07-07 20:17:24.751824722 -0400 EDT m=+0.135980934 container died 8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:17:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay-976835557e70125fb8b0fbe2f888b9f69052ecf805471614a74eec815f5ce212-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay-976835557e70125fb8b0fbe2f888b9f69052ecf805471614a74eec815f5ce212-merged.mount has successfully entered the 'dead' state.
Jul 07 20:17:24 managed-node1 podman[87138]: 2025-07-07 20:17:24.78485353 -0400 EDT m=+0.169009633 container cleanup 8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:17:24 managed-node1 systemd[1]: libpod-fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa.scope has successfully entered the 'dead' state.
Jul 07 20:17:24 managed-node1 podman[87138]: 2025-07-07 20:17:24.803343794 -0400 EDT m=+0.187500269 container died fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa (image=, name=b57dd77843de-infra, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:17:24 managed-node1 kernel: podman2: port 2(veth1) entered disabled state
Jul 07 20:17:24 managed-node1 kernel: veth1 (unregistering): left allmulticast mode
Jul 07 20:17:24 managed-node1 kernel: veth1 (unregistering): left promiscuous mode
Jul 07 20:17:24 managed-node1 kernel: podman2: port 2(veth1) entered disabled state
Jul 07 20:17:24 managed-node1 systemd[1]: run-netns-netns\x2dad524ec0\x2da4a3\x2d4520\x2d8e2f\x2dc0b64ccd5fc1.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-netns-netns\x2dad524ec0\x2da4a3\x2d4520\x2d8e2f\x2dc0b64ccd5fc1.mount has successfully entered the 'dead' state.
Jul 07 20:17:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa-rootfs-merge.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay\x2dcontainers-fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa-rootfs-merge.mount has successfully entered the 'dead' state.
Jul 07 20:17:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa-userdata-shm.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay\x2dcontainers-fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa-userdata-shm.mount has successfully entered the 'dead' state.
Jul 07 20:17:24 managed-node1 podman[87138]: 2025-07-07 20:17:24.954632715 -0400 EDT m=+0.338788816 container cleanup fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa (image=, name=b57dd77843de-infra, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:17:24 managed-node1 systemd[1]: Removed slice cgroup machine-libpod_pod_b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f.slice.
░░ Subject: A stop job for unit machine-libpod_pod_b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f.slice has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit machine-libpod_pod_b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f.slice has finished.
░░
░░ The job identifier is 5043 and the job result is done.
Jul 07 20:17:24 managed-node1 systemd[1]: machine-libpod_pod_b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f.slice: Consumed 1.428s CPU time.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit machine-libpod_pod_b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f.slice completed and consumed the indicated resources.
Jul 07 20:17:24 managed-node1 podman[87138]: 2025-07-07 20:17:24.980101642 -0400 EDT m=+0.364257774 container remove 8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:17:24 managed-node1 podman[87138]: 2025-07-07 20:17:24.984329832 -0400 EDT m=+0.368485981 container remove f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:17:25 managed-node1 podman[87138]: 2025-07-07 20:17:25.000781891 -0400 EDT m=+0.384938028 container remove fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa (image=, name=b57dd77843de-infra, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:17:25 managed-node1 systemd[1]: machine-libpod_pod_b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f.slice: Failed to open /run/systemd/transient/machine-libpod_pod_b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f.slice: No such file or directory
Jul 07 20:17:25 managed-node1 podman[87138]: 2025-07-07 20:17:25.007117903 -0400 EDT m=+0.391274000 pod remove b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f (image=, name=quadlet-demo)
Jul 07 20:17:25 managed-node1 podman[87138]: 2025-07-07 20:17:25.024618836 -0400 EDT m=+0.408775208 container remove 5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042 (image=, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Jul 07 20:17:25 managed-node1 quadlet-demo[87138]: Pods stopped:
Jul 07 20:17:25 managed-node1 quadlet-demo[87138]: b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f
Jul 07 20:17:25 managed-node1 quadlet-demo[87138]: Pods removed:
Jul 07 20:17:25 managed-node1 quadlet-demo[87138]: b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f
Jul 07 20:17:25 managed-node1 quadlet-demo[87138]: Secrets removed:
Jul 07 20:17:25 managed-node1 quadlet-demo[87138]: Volumes removed:
Jul 07 20:17:25 managed-node1 systemd[1]: quadlet-demo.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit quadlet-demo.service has successfully entered the 'dead' state.
Jul 07 20:17:25 managed-node1 systemd[1]: Stopped quadlet-demo.service.
░░ Subject: A stop job for unit quadlet-demo.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit quadlet-demo.service has finished.
░░
░░ The job identifier is 5041 and the job result is done.
Jul 07 20:17:25 managed-node1 python3.9[87347]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
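This stat checks for the quadlet unit file the role is about to remove. A rough sketch of the equivalent manual teardown of a system-scope quadlet unit (stop the generated service, delete the unit file, regenerate):

    systemctl stop quadlet-demo.service
    rm -f /etc/containers/systemd/quadlet-demo.kube
    systemctl daemon-reload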
Jul 07 20:17:26 managed-node1 python3.9[87647]: ansible-ansible.legacy.command Invoked with _raw_params=exec 1>&2
set -x
set -o pipefail
systemctl list-units --plain -l --all | grep quadlet || :
systemctl list-unit-files --all | grep quadlet || :
systemctl list-units --plain --failed -l --all | grep quadlet || :
_uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
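Journald records the debug helper above as a single multi-line _raw_params value; as a standalone script it is roughly:

    #!/bin/bash
    # send all output to stderr and trace each command
    exec 1>&2
    set -x
    set -o pipefail
    # list quadlet-related units, unit files, and failed units; '|| :' tolerates empty grep results
    systemctl list-units --plain -l --all | grep quadlet || :
    systemctl list-unit-files --all | grep quadlet || :
    systemctl list-units --plain --failed -l --all | grep quadlet || :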
Jul 07 20:17:27 managed-node1 python3.9[87803]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
PLAY RECAP *********************************************************************
managed-node1 : ok=255 changed=26 unreachable=0 failed=1 skipped=341 rescued=1 ignored=0
SYSTEM ROLES ERRORS BEGIN v1
[
{
"ansible_version": "2.17.12",
"end_time": "2025-07-08T00:17:25.962985+00:00Z",
"host": "managed-node1",
"message": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"start_time": "2025-07-08T00:17:25.943617+00:00Z",
"task_name": "Parse quadlet file",
"task_path": "/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12"
},
{
"ansible_version": "2.17.12",
"delta": "0:00:00.039949",
"end_time": "2025-07-07 20:17:27.059651",
"host": "managed-node1",
"message": "",
"rc": 0,
"start_time": "2025-07-07 20:17:27.019702",
"stdout": "Jul 07 20:12:50 managed-node1 podman[35538]: 2025-07-07 20:12:50.361351804 -0400 EDT m=+0.036190231 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:50 managed-node1 podman[35538]: 2025-07-07 20:12:50.374459829 -0400 EDT m=+0.049298407 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4145.\nJul 07 20:12:50 managed-node1 podman[35538]: 2025-07-07 20:12:50.422744257 -0400 EDT m=+0.097582608 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35595]: This container is intended for podman CI testing\nJul 07 20:12:50 managed-node1 podman[35538]: 2025-07-07 20:12:50.42588667 -0400 EDT m=+0.100725056 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:50 managed-node1 conmon[35595]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: 
/sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:50 managed-node1 podman[35606]: 2025-07-07 20:12:50.477186688 -0400 EDT m=+0.035333022 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:50 managed-node1 podman[35606]: 2025-07-07 20:12:50.491409753 -0400 EDT m=+0.049556363 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4150.\nJul 07 20:12:50 managed-node1 podman[35606]: 2025-07-07 20:12:50.552263881 -0400 EDT m=+0.110410323 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35663]: This container is intended for podman CI testing\nJul 07 20:12:50 managed-node1 conmon[35663]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:50 managed-node1 podman[35606]: 2025-07-07 20:12:50.557866714 -0400 EDT m=+0.116012971 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 
(image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:50 managed-node1 podman[35686]: 2025-07-07 20:12:50.620063202 -0400 EDT m=+0.048519999 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:50 managed-node1 podman[35686]: 2025-07-07 20:12:50.633249443 -0400 EDT m=+0.061706156 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4155.\nJul 07 20:12:50 managed-node1 podman[35686]: 2025-07-07 20:12:50.692351489 -0400 EDT m=+0.120808216 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35704]: This container is intended for podman CI testing\nJul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:50 managed-node1 podman[35686]: 2025-07-07 20:12:50.69827612 -0400 EDT m=+0.126732782 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, 
created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:50 managed-node1 python3.9[35698]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:50 managed-node1 podman[35708]: 2025-07-07 20:12:50.759561182 -0400 EDT m=+0.046849024 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:50 managed-node1 podman[35708]: 2025-07-07 20:12:50.77735646 -0400 EDT m=+0.064644058 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4160.\nJul 07 20:12:50 managed-node1 podman[35708]: 2025-07-07 20:12:50.833194605 -0400 EDT m=+0.120482053 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:50 managed-node1 podman[35708]: 2025-07-07 20:12:50.836115952 -0400 EDT m=+0.123403410 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35741]: This container is intended for podman CI testing\nJul 
07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:50 managed-node1 podman[35748]: 2025-07-07 20:12:50.869261632 -0400 EDT m=+0.023764777 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:50 managed-node1 podman[35748]: 2025-07-07 20:12:50.881551249 -0400 EDT m=+0.036054336 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4165.\nJul 07 20:12:50 managed-node1 podman[35748]: 2025-07-07 20:12:50.928402597 -0400 EDT m=+0.082905709 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:50 managed-node1 podman[35748]: 2025-07-07 20:12:50.931314876 -0400 EDT m=+0.085818011 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 
The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35759]: This container is intended for podman CI testing\nJul 07 20:12:50 managed-node1 conmon[35759]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:50 managed-node1 podman[35763]: 2025-07-07 20:12:50.96332082 -0400 EDT m=+0.021467626 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:50 managed-node1 podman[35763]: 2025-07-07 20:12:50.976815265 -0400 EDT m=+0.034961991 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:51 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4170.\nJul 07 20:12:51 managed-node1 podman[35763]: 2025-07-07 20:12:51.044375911 -0400 EDT m=+0.102522824 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:51 managed-node1 auth_test_1_kube-auth_test_1_kube[35798]: This container is intended for podman CI testing\nJul 07 20:12:51 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:51 managed-node1 podman[35763]: 2025-07-07 20:12:51.049348235 -0400 EDT m=+0.107495274 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 
(image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:51 managed-node1 podman[35825]: 2025-07-07 20:12:51.092437769 -0400 EDT m=+0.029222529 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:51 managed-node1 podman[35825]: 2025-07-07 20:12:51.107275694 -0400 EDT m=+0.044060322 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:51 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4175.\nJul 07 20:12:51 managed-node1 podman[35825]: 2025-07-07 20:12:51.15929211 -0400 EDT m=+0.096076800 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:51 managed-node1 podman[35825]: 2025-07-07 20:12:51.163527993 -0400 EDT m=+0.100312554 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:51 managed-node1 auth_test_1_kube-auth_test_1_kube[35867]: This container is intended for podman CI testing\nJul 07 20:12:51 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 
Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:51 managed-node1 conmon[35867]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:51 managed-node1 podman[35894]: 2025-07-07 20:12:51.225315735 -0400 EDT m=+0.048836182 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:51 managed-node1 podman[35894]: 2025-07-07 20:12:51.239386226 -0400 EDT m=+0.062906669 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:51 managed-node1 python3.9[35925]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nJul 07 20:12:51 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4180.\nJul 07 20:12:51 managed-node1 podman[35894]: 2025-07-07 20:12:51.466820809 -0400 EDT m=+0.290341396 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:51 managed-node1 podman[35894]: 2025-07-07 20:12:51.470785368 -0400 EDT m=+0.294305786 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, 
created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:51 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:51 managed-node1 auth_test_1_kube-auth_test_1_kube[35931]: This container is intended for podman CI testing\nJul 07 20:12:51 managed-node1 conmon[35931]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:51 managed-node1 systemd[1]: Reloading.\nJul 07 20:12:51 managed-node1 podman[35937]: 2025-07-07 20:12:51.552775836 -0400 EDT m=+0.056358134 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:51 managed-node1 podman[35937]: 2025-07-07 20:12:51.570347566 -0400 EDT m=+0.073929520 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:51 managed-node1 systemd-rc-local-generator[35966]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:12:51 managed-node1 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play...\n\u2591\u2591 Subject: A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4185.\nJul 07 20:12:51 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4186.\nJul 07 20:12:51 managed-node1 podman[35937]: 2025-07-07 20:12:51.780283168 -0400 EDT m=+0.283865102 container init 
b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:51 managed-node1 auth_test_1_kube-auth_test_1_kube[35985]: This container is intended for podman CI testing\nJul 07 20:12:51 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:51 managed-node1 podman[35937]: 2025-07-07 20:12:51.794156305 -0400 EDT m=+0.297738289 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:51 managed-node1 podman[35983]: 2025-07-07 20:12:51.805487564 -0400 EDT m=+0.042136278 pod stop 2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7 (image=, name=auth_test_1_kube)\nJul 07 20:12:51 managed-node1 podman[35983]: 2025-07-07 20:12:51.807535435 -0400 EDT m=+0.044184379 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:51 managed-node1 systemd[1]: var-lib-containers-storage-overlay-6e8d6012bb184b7d9b4ca98710c13c4398e87e9e6ee6aa4809430868bbb52621-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-6e8d6012bb184b7d9b4ca98710c13c4398e87e9e6ee6aa4809430868bbb52621-merged.mount has successfully entered the 'dead' state.\nJul 07 20:12:51 managed-node1 podman[35983]: 2025-07-07 20:12:51.856254498 -0400 EDT m=+0.092903180 container cleanup b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:51 managed-node1 systemd[1]: 
libpod-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631.scope has successfully entered the 'dead' state.\nJul 07 20:12:51 managed-node1 podman[35983]: 2025-07-07 20:12:51.866791467 -0400 EDT m=+0.103440452 container died feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631 (image=, name=2f0bc5d17106-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:51 managed-node1 systemd[1]: run-r8208ec8fcd4d4202907a4db922529a4b.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-r8208ec8fcd4d4202907a4db922529a4b.scope has successfully entered the 'dead' state.\nJul 07 20:12:51 managed-node1 kernel: podman1: port 1(veth1) entered disabled state\nJul 07 20:12:51 managed-node1 kernel: veth1 (unregistering): left allmulticast mode\nJul 07 20:12:51 managed-node1 kernel: veth1 (unregistering): left promiscuous mode\nJul 07 20:12:51 managed-node1 kernel: podman1: port 1(veth1) entered disabled state\nJul 07 20:12:51 managed-node1 NetworkManager[642]: [1751933571.9144] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')\nJul 07 20:12:51 managed-node1 systemd[1]: Starting Network Manager Script Dispatcher Service...\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4192.\nJul 07 20:12:51 managed-node1 systemd[1]: Started Network Manager Script Dispatcher Service.\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4192.\nJul 07 20:12:52 managed-node1 systemd[1]: run-netns-netns\\x2d2411dc39\\x2d3430\\x2d1e50\\x2dd025\\x2d1a201717aa6d.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-netns-netns\\x2d2411dc39\\x2d3430\\x2d1e50\\x2dd025\\x2d1a201717aa6d.mount has successfully entered the 'dead' state.\nJul 07 20:12:52 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 07 20:12:52 managed-node1 systemd[1]: 
var-lib-containers-storage-overlay\\x2dcontainers-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.071289258 -0400 EDT m=+0.307937965 container cleanup feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631 (image=, name=2f0bc5d17106-infra, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:52 managed-node1 systemd[1]: Removed slice cgroup machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice.\n\u2591\u2591 Subject: A stop job for unit machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4258 and the job result is done.\nJul 07 20:12:52 managed-node1 systemd[1]: machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice: Consumed 1.746s CPU time.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice completed and consumed the indicated resources.\nJul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.098281515 -0400 EDT m=+0.334930231 container remove b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.124590961 -0400 EDT m=+0.361239677 container remove feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631 (image=, name=2f0bc5d17106-infra, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:52 managed-node1 systemd[1]: machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice: Failed to open /run/systemd/transient/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice: No such file or directory\nJul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.134201698 -0400 EDT m=+0.370850385 pod remove 2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7 (image=, name=auth_test_1_kube)\nJul 07 20:12:52 managed-node1 podman[35983]: Pods 
stopped:\nJul 07 20:12:52 managed-node1 podman[35983]: 2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7\nJul 07 20:12:52 managed-node1 podman[35983]: Pods removed:\nJul 07 20:12:52 managed-node1 podman[35983]: 2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7\nJul 07 20:12:52 managed-node1 podman[35983]: Secrets removed:\nJul 07 20:12:52 managed-node1 podman[35983]: Volumes removed:\nJul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.138635694 -0400 EDT m=+0.375284640 container kill 782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:52 managed-node1 systemd[1]: libpod-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc.scope has successfully entered the 'dead' state.\nJul 07 20:12:52 managed-node1 conmon[20791]: conmon 782aa8ef72d65d91e61b : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/libpod-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc.scope/container/memory.events\nJul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.145356852 -0400 EDT m=+0.382005666 container died 782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:52 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.214468795 -0400 EDT m=+0.451117504 container remove 782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:52 managed-node1 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has successfully entered the 'dead' state.\nJul 07 20:12:52 managed-node1 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play.\n\u2591\u2591 Subject: A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has finished.\n\u2591\u2591 
\n\u2591\u2591 The job identifier is 4185 and the job result is done.\nJul 07 20:12:52 managed-node1 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service: Consumed 36.014s CPU time.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service completed and consumed the indicated resources.\nJul 07 20:12:52 managed-node1 python3.9[36210]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:52 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:12:53 managed-node1 python3.9[36361]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 07 20:12:53 managed-node1 python3.9[36361]: ansible-containers.podman.podman_play version: 5.5.1, kube file /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml\nJul 07 20:12:53 managed-node1 python3.9[36523]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:12:54 managed-node1 python3.9[36672]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:56 managed-node1 python3.9[36823]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None\nJul 07 20:12:56 managed-node1 systemd[1]: Reloading.\nJul 07 20:12:56 managed-node1 systemd-rc-local-generator[36842]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:12:57 managed-node1 python3.9[37008]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:57 managed-node1 python3.9[37308]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True 
strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:59 managed-node1 python3.9[37607]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:00 managed-node1 python3.9[37762]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:01 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 07 20:13:03 managed-node1 python3.9[37913]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:05 managed-node1 python3.9[38064]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:06 managed-node1 python3.9[38215]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:07 managed-node1 python3.9[38366]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:08 managed-node1 python3.9[38516]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nJul 07 20:13:08 managed-node1 python3.9[38667]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:09 managed-node1 python3.9[38816]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:10 managed-node1 python3.9[38965]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:12 managed-node1 python3.9[39116]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None\nJul 07 20:13:12 managed-node1 systemd[1]: Reloading.\nJul 07 20:13:12 managed-node1 systemd-rc-local-generator[39137]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:13:12 managed-node1 python3.9[39301]: ansible-stat Invoked with 
path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:13 managed-node1 python3.9[39601]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:15 managed-node1 python3.9[39900]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:16 managed-node1 python3.9[40054]: ansible-getent Invoked with database=passwd key=auth_test_user1 fail_key=False service=None split=None\nJul 07 20:13:17 managed-node1 python3.9[40204]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:17 managed-node1 python3.9[40354]: ansible-user Invoked with name=auth_test_user1 state=absent non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None\nJul 07 20:13:18 managed-node1 python3.9[40504]: ansible-file Invoked with path=/home/auth_test_user1 state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:18 managed-node1 python3.9[40653]: ansible-ansible.legacy.command Invoked with _raw_params=podman inspect podman_registry --format '{{range .}}{{range .Mounts}}{{if eq .Type \"volume\"}}{{.Name}}{{end}}{{end}}{{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:19 managed-node1 python3.9[40810]: ansible-ansible.legacy.command Invoked with _raw_params=podman rm -f podman_registry _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:19 managed-node1 systemd[1]: libpod-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b.scope has successfully entered the 'dead' state.\nJul 07 20:13:19 managed-node1 podman[40811]: 2025-07-07 20:13:19.265873387 -0400 EDT m=+0.045935077 container died 
2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b (image=quay.io/libpod/registry:2.8.2, name=podman_registry)\nJul 07 20:13:19 managed-node1 kernel: podman0: port 1(veth0) entered disabled state\nJul 07 20:13:19 managed-node1 kernel: veth0 (unregistering): left allmulticast mode\nJul 07 20:13:19 managed-node1 kernel: veth0 (unregistering): left promiscuous mode\nJul 07 20:13:19 managed-node1 kernel: podman0: port 1(veth0) entered disabled state\nJul 07 20:13:19 managed-node1 NetworkManager[642]: [1751933599.3183] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')\nJul 07 20:13:19 managed-node1 systemd[1]: Starting Network Manager Script Dispatcher Service...\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4260.\nJul 07 20:13:19 managed-node1 systemd[1]: Started Network Manager Script Dispatcher Service.\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4260.\nJul 07 20:13:19 managed-node1 systemd[1]: run-netns-netns\\x2da357660f\\x2d51e9\\x2def21\\x2deb3c\\x2d7f281ab8e18b.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-netns-netns\\x2da357660f\\x2d51e9\\x2def21\\x2deb3c\\x2d7f281ab8e18b.mount has successfully entered the 'dead' state.\nJul 07 20:13:19 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:13:19 managed-node1 systemd[1]: var-lib-containers-storage-overlay-112b09f11ff8fbba8e451f27769fa8e700d9bb89038833f136b238148dff37fa-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-112b09f11ff8fbba8e451f27769fa8e700d9bb89038833f136b238148dff37fa-merged.mount has successfully entered the 'dead' state.\nJul 07 20:13:19 managed-node1 podman[40811]: 2025-07-07 20:13:19.49814603 -0400 EDT m=+0.278207625 container remove 2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b (image=quay.io/libpod/registry:2.8.2, name=podman_registry)\nJul 07 20:13:19 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit 
var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:13:19 managed-node1 systemd[1]: libpod-conmon-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-conmon-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b.scope has successfully entered the 'dead' state.\nJul 07 20:13:19 managed-node1 python3.9[41023]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume rm 1e074241011384a8157c17bad24c616d5279de9c3f24494baf6b8341065b25b1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:19 managed-node1 podman[41024]: 2025-07-07 20:13:19.910047174 -0400 EDT m=+0.025683882 volume remove 1e074241011384a8157c17bad24c616d5279de9c3f24494baf6b8341065b25b1\nJul 07 20:13:20 managed-node1 python3.9[41180]: ansible-file Invoked with path=/tmp/lsr_g7zmh6pf_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:22 managed-node1 python3.9[41378]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d\nJul 07 20:13:23 managed-node1 python3.9[41553]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:23 managed-node1 python3.9[41702]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:25 managed-node1 python3.9[42000]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:26 managed-node1 python3.9[42155]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:13:26 managed-node1 python3.9[42305]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:28 managed-node1 python3.9[42456]: ansible-tempfile Invoked with state=directory prefix=lsr_podman_config_ suffix= path=None\nJul 07 20:13:28 managed-node1 python3.9[42605]: ansible-ansible.legacy.command Invoked with _raw_params=tar --ignore-failed-read -c -P -v -p -f /tmp/lsr_podman_config_630o0ml1/backup.tar /etc/containers/containers.conf.d/50-systemroles.conf /etc/containers/registries.conf.d/50-systemroles.conf /etc/containers/storage.conf /etc/containers/policy.json _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:29 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 07 20:13:29 managed-node1 python3.9[42755]: ansible-user Invoked with name=user1 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None\nJul 07 20:13:29 managed-node1 useradd[42757]: new group: name=user1, GID=1000\nJul 07 20:13:29 managed-node1 useradd[42757]: new user: name=user1, UID=1000, GID=1000, home=/home/user1, shell=/bin/bash, from=/dev/pts/0\nJul 07 20:13:31 managed-node1 python3.9[43061]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:32 managed-node1 python3.9[43217]: ansible-getent Invoked with database=passwd key=user1 fail_key=False service=None split=None\nJul 07 20:13:33 managed-node1 python3.9[43367]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:33 managed-node1 python3.9[43518]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:33 managed-node1 python3.9[43668]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:34 managed-node1 python3.9[43818]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:35 managed-node1 python3.9[43967]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:35 managed-node1 python3.9[44087]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933614.9410055-13310-24639621612646/.source.conf _original_basename=.lefyhyea follow=False checksum=b1776092f2908d76e11fd6af87267469b2c17d5a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None 
setype=None attributes=None\nJul 07 20:13:36 managed-node1 python3.9[44236]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:36 managed-node1 python3.9[44385]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:36 managed-node1 python3.9[44505]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933616.3031077-13365-95635820072900/.source.conf _original_basename=.79pds_qw follow=False checksum=fde25488ce7040f1639af7bfc88ed125318cc0b0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:37 managed-node1 python3.9[44654]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:37 managed-node1 python3.9[44803]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:38 managed-node1 python3.9[44923]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/storage.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933617.4859679-13409-167338433742499/.source.conf _original_basename=.5__9m3z1 follow=False checksum=38f015f4780579bd388dd955b42916199fd7fe19 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:38 managed-node1 python3.9[45072]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:38 managed-node1 python3.9[45221]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:39 managed-node1 python3.9[45370]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:39 managed-node1 python3.9[45490]: 
ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/policy.json owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933619.023253-13468-75507327963883/.source.json _original_basename=.gsbubo1d follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:40 managed-node1 python3.9[45639]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:40 managed-node1 python3.9[45790]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:41 managed-node1 python3.9[45940]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:41 managed-node1 python3.9[46090]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:43 managed-node1 python3.9[46508]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:43 managed-node1 python3.9[46659]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:44 managed-node1 python3.9[46809]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:45 managed-node1 python3.9[46959]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:45 managed-node1 python3.9[47110]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:45 managed-node1 python3.9[47261]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:46 managed-node1 python3.9[47412]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:46 managed-node1 python3.9[47563]: ansible-ansible.legacy.command Invoked with _raw_params=grep 'container_name_as_hostname[ ]*=[ ]*true' 
/home/user1/.config/containers/containers.conf.d/50-systemroles.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:48 managed-node1 python3.9[47862]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:49 managed-node1 python3.9[48017]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:49 managed-node1 python3.9[48168]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:49 managed-node1 python3.9[48318]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:50 managed-node1 python3.9[48468]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:51 managed-node1 python3.9[48617]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:51 managed-node1 python3.9[48692]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf _original_basename=.yuv1oz8a recurse=False state=file path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:52 managed-node1 python3.9[48841]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:52 managed-node1 python3.9[48990]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:52 managed-node1 python3.9[49065]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf _original_basename=.t7udz4o_ recurse=False 
state=file path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:53 managed-node1 python3.9[49214]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:53 managed-node1 python3.9[49363]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:53 managed-node1 python3.9[49438]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/storage.conf _original_basename=.acx818hv recurse=False state=file path=/home/user1/.config/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:54 managed-node1 python3.9[49587]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:54 managed-node1 python3.9[49736]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:55 managed-node1 python3.9[49887]: ansible-slurp Invoked with path=/home/user1/.config/containers/policy.json src=/home/user1/.config/containers/policy.json\nJul 07 20:13:56 managed-node1 python3.9[50036]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:56 managed-node1 python3.9[50187]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:56 managed-node1 python3.9[50337]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:57 managed-node1 python3.9[50487]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None 
setype=None attributes=None\nJul 07 20:13:59 managed-node1 python3.9[50860]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:59 managed-node1 python3.9[51011]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:00 managed-node1 python3.9[51161]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:01 managed-node1 python3.9[51311]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:01 managed-node1 python3.9[51462]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:01 managed-node1 python3.9[51613]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:02 managed-node1 python3.9[51764]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:03 managed-node1 python3.9[52064]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:04 managed-node1 python3.9[52219]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:14:05 managed-node1 python3.9[52369]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:05 managed-node1 python3.9[52520]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:06 managed-node1 python3.9[52669]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:06 managed-node1 python3.9[52789]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933646.0899522-14266-164202167319133/.source.conf _original_basename=.f55n9smu follow=False checksum=b1776092f2908d76e11fd6af87267469b2c17d5a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None 
attributes=None\nJul 07 20:14:07 managed-node1 python3.9[52938]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:07 managed-node1 python3.9[53087]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:07 managed-node1 python3.9[53207]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/registries.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933647.2349114-14295-264742941497318/.source.conf _original_basename=.kaocflcp follow=False checksum=fde25488ce7040f1639af7bfc88ed125318cc0b0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:08 managed-node1 python3.9[53356]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:08 managed-node1 python3.9[53505]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:08 managed-node1 python3.9[53627]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/storage.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933648.3819814-14329-124138118490068/.source.conf _original_basename=.cw2ofq3l follow=False checksum=38f015f4780579bd388dd955b42916199fd7fe19 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:09 managed-node1 python3.9[53776]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:09 managed-node1 python3.9[53925]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:10 managed-node1 python3.9[54076]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json\nJul 07 20:14:10 managed-node1 python3.9[54225]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:10 managed-node1 python3.9[54347]: ansible-ansible.legacy.copy 
Invoked with dest=/etc/containers/policy.json owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933650.3431063-14385-203115853451002/.source.json _original_basename=.9ge15xwj follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:11 managed-node1 python3.9[54496]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:12 managed-node1 python3.9[54647]: ansible-file Invoked with path=/root/.config/containers state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:14 managed-node1 python3.9[55067]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:15 managed-node1 python3.9[55218]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:15 managed-node1 python3.9[55369]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:16 managed-node1 python3.9[55520]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:16 managed-node1 python3.9[55671]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:18 managed-node1 python3.9[55971]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:19 managed-node1 python3.9[56126]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:20 managed-node1 python3.9[56277]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:21 managed-node1 python3.9[56426]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:21 managed-node1 python3.9[56501]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/containers.conf.d/50-systemroles.conf _original_basename=.05q0dgxv recurse=False state=file 
path=/etc/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:22 managed-node1 python3.9[56650]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:22 managed-node1 python3.9[56799]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:22 managed-node1 python3.9[56874]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=.p8krhe9y recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:23 managed-node1 python3.9[57023]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:23 managed-node1 python3.9[57172]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:24 managed-node1 python3.9[57247]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=.41qn4gp5 recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:24 managed-node1 python3.9[57396]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:24 managed-node1 python3.9[57545]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:25 managed-node1 python3.9[57696]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json\nJul 07 20:14:26 managed-node1 python3.9[57845]: ansible-stat Invoked with 
path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:27 managed-node1 python3.9[57996]: ansible-file Invoked with path=/root/.config/containers state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:28 managed-node1 python3.9[58369]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:29 managed-node1 python3.9[58520]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:30 managed-node1 python3.9[58671]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:30 managed-node1 python3.9[58822]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:30 managed-node1 python3.9[58973]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:31 managed-node1 python3.9[59124]: ansible-slurp Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf src=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf\nJul 07 20:14:32 managed-node1 python3.9[59273]: ansible-slurp Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf src=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf\nJul 07 20:14:32 managed-node1 python3.9[59422]: ansible-slurp Invoked with path=/home/user1/.config/containers/storage.conf src=/home/user1/.config/containers/storage.conf\nJul 07 20:14:33 managed-node1 python3.9[59571]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf\nJul 07 20:14:33 managed-node1 python3.9[59720]: ansible-slurp Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf src=/etc/containers/registries.conf.d/50-systemroles.conf\nJul 07 20:14:34 managed-node1 python3.9[59869]: ansible-slurp Invoked with path=/etc/containers/storage.conf src=/etc/containers/storage.conf\nJul 07 20:14:35 managed-node1 python3.9[60167]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:37 managed-node1 python3.9[60322]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:38 managed-node1 python3.9[60473]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None 
access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:38 managed-node1 python3.9[60622]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:38 managed-node1 python3.9[60744]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933678.2505736-15338-238322287102956/.source.conf _original_basename=.8f6zzn7v follow=False checksum=9694c1d1c700a6435eecf4066b052584f4ee94c0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:39 managed-node1 python3.9[60893]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:39 managed-node1 python3.9[61042]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:40 managed-node1 python3.9[61117]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=.ttqitrgk recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:40 managed-node1 python3.9[61266]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:41 managed-node1 python3.9[61415]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:41 managed-node1 python3.9[61490]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=.chl3shxe recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:41 managed-node1 python3.9[61639]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S 
unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:42 managed-node1 python3.9[61788]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:42 managed-node1 python3.9[61939]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json\nJul 07 20:14:43 managed-node1 python3.9[62088]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf\nJul 07 20:14:43 managed-node1 python3.9[62237]: ansible-file Invoked with state=absent path=/etc/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:44 managed-node1 python3.9[62386]: ansible-file Invoked with state=absent path=/etc/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:44 managed-node1 python3.9[62535]: ansible-file Invoked with state=absent path=/etc/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:44 managed-node1 python3.9[62684]: ansible-file Invoked with state=absent path=/etc/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:45 managed-node1 python3.9[62833]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:45 managed-node1 python3.9[62982]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:45 managed-node1 python3.9[63131]: ansible-file Invoked with 
state=absent path=/home/user1/.config/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:46 managed-node1 python3.9[63280]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:46 managed-node1 python3.9[63429]: ansible-file Invoked with state=absent path=/root/.config/containers/auth.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:46 managed-node1 python3.9[63578]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/auth.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:47 managed-node1 python3.9[63727]: ansible-ansible.legacy.command Invoked with _raw_params=tar xfvpP /tmp/lsr_podman_config_630o0ml1/backup.tar _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:47 managed-node1 python3.9[63877]: ansible-file Invoked with state=absent path=/tmp/lsr_podman_config_630o0ml1 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:49 managed-node1 python3.9[64075]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d\nJul 07 20:14:50 managed-node1 python3.9[64226]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:50 managed-node1 python3.9[64375]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:52 managed-node1 python3.9[64673]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:53 managed-node1 python3.9[64828]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 
20:14:53 managed-node1 python3.9[64978]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:57 managed-node1 python3.9[65178]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d\nJul 07 20:15:00 managed-node1 python3.9[65353]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:00 managed-node1 python3.9[65502]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:02 managed-node1 python3.9[65800]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:03 managed-node1 python3.9[65955]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:15:04 managed-node1 python3.9[66105]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:09 managed-node1 python3.9[66305]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d\nJul 07 20:15:10 managed-node1 python3.9[66480]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:10 managed-node1 python3.9[66629]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:12 managed-node1 python3.9[66927]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:13 managed-node1 python3.9[67082]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:15:14 managed-node1 python3.9[67232]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:16 managed-node1 python3.9[67383]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:17 managed-node1 python3.9[67534]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:18 managed-node1 python3.9[67683]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:15:18 managed-node1 python3.9[67803]: ansible-ansible.legacy.copy Invoked with 
src=/root/.ansible/tmp/ansible-tmp-1751933717.8906143-17219-30430442070937/.source.container dest=/etc/containers/systemd/nopull.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=670d64fc68a9768edb20cad26df2acc703542d85 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:20 managed-node1 python3.9[68101]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:21 managed-node1 python3.9[68256]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:23 managed-node1 python3.9[68407]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:15:24 managed-node1 podman[68566]: 2025-07-07 20:15:24.692519338 -0400 EDT m=+0.023060903 image pull-error this_is_a_bogus_image:latest short-name resolution enforced but cannot prompt without a TTY\nJul 07 20:15:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:15:25 managed-node1 python3.9[68721]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:25 managed-node1 python3.9[68870]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/bogus.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:15:25 managed-node1 python3.9[68990]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933725.2913904-17409-127642446506455/.source.container dest=/etc/containers/systemd/bogus.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1d087e679d135214e8ac9ccaf33b2222916efb7f backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:28 managed-node1 python3.9[69288]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None 
removes=None stdin=None\nJul 07 20:15:29 managed-node1 python3.9[69443]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:32 managed-node1 python3.9[69594]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:33 managed-node1 python3.9[69745]: ansible-systemd Invoked with name=nopull.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None\nJul 07 20:15:33 managed-node1 python3.9[69895]: ansible-stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:34 managed-node1 python3.9[70195]: ansible-ansible.legacy.command Invoked with _raw_params=set -x\n set -o pipefail\n exec 1>&2\n #podman volume rm --all\n #podman network prune -f\n podman volume ls\n podman network ls\n podman secret ls\n podman container ls\n podman pod ls\n podman images\n systemctl list-units | grep quadlet\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:34 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:15:34 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:15:35 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:15:36 managed-node1 python3.9[70540]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:37 managed-node1 python3.9[70695]: ansible-getent Invoked with database=passwd key=user_quadlet_basic fail_key=False service=None split=None\nJul 07 20:15:37 managed-node1 python3.9[70845]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:39 managed-node1 python3.9[71044]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d\nJul 07 20:15:40 managed-node1 python3.9[71219]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:40 managed-node1 python3.9[71368]: 
ansible-ansible.legacy.command Invoked with _raw_params=systemctl is-system-running _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:41 managed-node1 python3.9[71518]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nJul 07 20:15:42 managed-node1 python3.9[71668]: ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nJul 07 20:15:44 managed-node1 python3.9[71818]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:44 managed-node1 python3.9[71967]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:45 managed-node1 python3.9[72116]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None\nJul 07 20:15:46 managed-node1 python3.9[72267]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=#\n # Ansible managed\n #\n # system_role:certificate\n booted=True provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to 
/var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to 
/var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 certmonger[72282]: Certificate in file \"/etc/pki/tls/certs/quadlet_demo.crt\" issued by CA and saved.\nJul 07 20:15:46 managed-node1 certmonger[12020]: 2025-07-07 20:15:46 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:46 managed-node1 python3.9[72431]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt\nJul 07 20:15:47 managed-node1 python3.9[72580]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key\nJul 07 20:15:47 managed-node1 python3.9[72729]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt\nJul 07 20:15:48 managed-node1 python3.9[72878]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:48 managed-node1 certmonger[12020]: 2025-07-07 20:15:48 [12020] Wrote to /var/lib/certmonger/requests/20250708001546\nJul 07 20:15:48 managed-node1 python3.9[73028]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:48 managed-node1 python3.9[73177]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:49 managed-node1 python3.9[73326]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:49 managed-node1 python3.9[73475]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:50 managed-node1 python3.9[73624]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:51 managed-node1 python3.9[73922]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:52 managed-node1 python3.9[74077]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:15:52 managed-node1 
python3.9[74227]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:54 managed-node1 python3.9[74378]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:54 managed-node1 python3.9[74527]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:55 managed-node1 python3.9[74676]: ansible-ansible.legacy.command Invoked with _raw_params=systemctl is-system-running _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:55 managed-node1 python3.9[74826]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nJul 07 20:15:57 managed-node1 python3.9[74976]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None\nJul 07 20:15:57 managed-node1 python3.9[75127]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None\nJul 07 20:15:57 managed-node1 systemd[1]: Reloading.\nJul 07 20:15:57 managed-node1 systemd-rc-local-generator[75147]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:15:58 managed-node1 systemd[1]: Starting firewalld - dynamic firewall daemon...\n\u2591\u2591 Subject: A start job for unit firewalld.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit firewalld.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4326.\nJul 07 20:15:58 managed-node1 systemd[1]: Started firewalld - dynamic firewall daemon.\n\u2591\u2591 Subject: A start job for unit firewalld.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit firewalld.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4326.\nJul 07 20:15:58 managed-node1 quadlet-generator[75135]: Warning: bogus.container specifies the image \"this_is_a_bogus_image\" which not a fully qualified image name. This is not ideal for performance and security reasons. 
See the podman-pull manpage discussion of short-name-aliases.conf for details.\nJul 07 20:15:58 managed-node1 kernel: Warning: Unmaintained driver is detected: ip_set\nJul 07 20:15:58 managed-node1 kernel: Warning: Unmaintained driver is detected: ip_set_init\nJul 07 20:15:59 managed-node1 python3.9[75351]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 07 20:15:59 managed-node1 python3.9[75500]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 07 20:15:59 managed-node1 rsyslogd[809]: imjournal: journal files changed, reloading... [v8.2412.0-2.el9 try https://www.rsyslog.com/e/0 ]\nJul 07 20:16:02 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:16:02 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:16:02 managed-node1 podman[75670]: 2025-07-07 20:16:02.087398298 -0400 EDT m=+0.019911969 secret create ea8c34f75eeeee13a33c9ef0e\nJul 07 20:16:03 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:16:03 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:16:03 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:16:03 managed-node1 podman[75848]: 2025-07-07 20:16:03.507704545 -0400 
EDT m=+0.017383206 secret create cb2d4c65044c524adeead96e5\nJul 07 20:16:04 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:16:05 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:16:05 managed-node1 podman[76024]: 2025-07-07 20:16:05.052661564 -0400 EDT m=+0.019543154 secret create a95acde29e8944984a9c6b05c\nJul 07 20:16:05 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:16:06 managed-node1 python3.9[76180]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:16:07 managed-node1 python3.9[76331]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:16:08 managed-node1 python3.9[76480]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:16:08 managed-node1 python3.9[76600]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933767.7445745-18733-5242759390053/.source.network dest=/etc/containers/systemd/quadlet-demo.network owner=root group=0 mode=0644 _original_basename=quadlet-demo.network follow=False checksum=e57c08d49aff4bae8daab138d913aeddaa8682a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:16:09 managed-node1 python3.9[76749]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None\nJul 07 20:16:09 managed-node1 systemd[1]: Reloading.\nJul 07 20:16:09 managed-node1 quadlet-generator[76755]: Warning: bogus.container specifies the image \"this_is_a_bogus_image\" which not a fully qualified image name. This is not ideal for performance and security reasons. 
See the podman-pull manpage discussion of short-name-aliases.conf for details.\nJul 07 20:16:09 managed-node1 systemd-rc-local-generator[76767]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:16:09 managed-node1 python3.9[76932]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None\nJul 07 20:16:09 managed-node1 systemd[1]: Starting quadlet-demo-network.service...\n\u2591\u2591 Subject: A start job for unit quadlet-demo-network.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit quadlet-demo-network.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4397.\nJul 07 20:16:09 managed-node1 podman[76936]: 2025-07-07 20:16:09.813688266 -0400 EDT m=+0.023505174 network create e77a522f8940bc72322e47b9594fa31c2a9239c87a24a572992d50c2070722ac (name=systemd-quadlet-demo, type=bridge)\nJul 07 20:16:09 managed-node1 quadlet-demo-network[76936]: systemd-quadlet-demo\nJul 07 20:16:09 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:16:09 managed-node1 systemd[1]: Finished quadlet-demo-network.service.\n\u2591\u2591 Subject: A start job for unit quadlet-demo-network.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit quadlet-demo-network.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4397.\nJul 07 20:16:10 managed-node1 python3.9[77091]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:16:12 managed-node1 python3.9[77242]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:16:12 managed-node1 python3.9[77391]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:16:13 managed-node1 python3.9[77511]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933772.715237-18900-74746594643675/.source.volume dest=/etc/containers/systemd/quadlet-demo-mysql.volume owner=root group=0 mode=0644 _original_basename=quadlet-demo-mysql.volume follow=False checksum=585f8cbdf0ec73000f9227dcffbef71e9552ea4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:16:13 managed-node1 python3.9[77660]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False 
name=None state=None enabled=None force=None masked=None\nJul 07 20:16:13 managed-node1 systemd[1]: Reloading.\nJul 07 20:16:14 managed-node1 quadlet-generator[77666]: Warning: bogus.container specifies the image \"this_is_a_bogus_image\" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.\nJul 07 20:16:14 managed-node1 systemd-rc-local-generator[77678]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:16:14 managed-node1 python3.9[77843]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None\nJul 07 20:16:14 managed-node1 systemd[1]: Starting quadlet-demo-mysql-volume.service...\n\u2591\u2591 Subject: A start job for unit quadlet-demo-mysql-volume.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit quadlet-demo-mysql-volume.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4468.\nJul 07 20:16:14 managed-node1 podman[77847]: 2025-07-07 20:16:14.758975109 -0400 EDT m=+0.027693599 volume create systemd-quadlet-demo-mysql\nJul 07 20:16:14 managed-node1 quadlet-demo-mysql-volume[77847]: systemd-quadlet-demo-mysql\nJul 07 20:16:14 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:16:14 managed-node1 systemd[1]: Finished quadlet-demo-mysql-volume.service.\n\u2591\u2591 Subject: A start job for unit quadlet-demo-mysql-volume.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit quadlet-demo-mysql-volume.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4468.\nJul 07 20:16:15 managed-node1 python3.9[78004]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:16:17 managed-node1 python3.9[78155]: ansible-file Invoked with path=/tmp/quadlet_demo state=directory owner=root group=root mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:16:17 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:16:23 managed-node1 podman[78312]: 2025-07-07 20:16:23.966027689 -0400 EDT m=+5.992362773 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6\nJul 07 20:16:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated 
successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:16:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:16:24 managed-node1 python3.9[78638]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:16:24 managed-node1 python3.9[78787]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:16:25 managed-node1 python3.9[78907]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo-mysql.container owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933784.6711764-19256-243645638611479/.source.container _original_basename=.khut39a4 follow=False checksum=ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:16:25 managed-node1 python3.9[79056]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None\nJul 07 20:16:25 managed-node1 systemd[1]: Reloading.\nJul 07 20:16:25 managed-node1 quadlet-generator[79062]: Warning: bogus.container specifies the image \"this_is_a_bogus_image\" which not a fully qualified image name. This is not ideal for performance and security reasons. 
See the podman-pull manpage discussion of short-name-aliases.conf for details.\nJul 07 20:16:25 managed-node1 systemd-rc-local-generator[79074]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:16:26 managed-node1 python3.9[79239]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None\nJul 07 20:16:26 managed-node1 systemd[1]: Starting quadlet-demo-mysql.service...\n\u2591\u2591 Subject: A start job for unit quadlet-demo-mysql.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit quadlet-demo-mysql.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4539.\nJul 07 20:16:26 managed-node1 podman[79243]: 2025-07-07 20:16:26.601319498 -0400 EDT m=+0.041646112 container create f33934011335e1e05e67f30d3833447877c8c3f24563f59e1979b85508143a46 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)\nJul 07 20:16:26 managed-node1 systemd[1]: var-lib-containers-storage-overlay-volatile\\x2dcheck3897929246-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-volatile\\x2dcheck3897929246-merged.mount has successfully entered the 'dead' state.\nJul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6259] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/9)\nJul 07 20:16:26 managed-node1 kernel: podman2: port 1(veth0) entered blocking state\nJul 07 20:16:26 managed-node1 kernel: podman2: port 1(veth0) entered disabled state\nJul 07 20:16:26 managed-node1 kernel: veth0: entered allmulticast mode\nJul 07 20:16:26 managed-node1 kernel: veth0: entered promiscuous mode\nJul 07 20:16:26 managed-node1 kernel: podman2: port 1(veth0) entered blocking state\nJul 07 20:16:26 managed-node1 kernel: podman2: port 1(veth0) entered forwarding state\nJul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6393] device (veth0): carrier: link connected\nJul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6396] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/10)\nJul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6400] device (podman2): carrier: link connected\nJul 07 20:16:26 managed-node1 systemd-udevd[79268]: Network interface NamePolicy= disabled on kernel command line.\nJul 07 20:16:26 managed-node1 systemd-udevd[79270]: Network interface NamePolicy= disabled on kernel command line.\nJul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6722] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')\nJul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6735] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')\nJul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6747] device (podman2): Activation: starting connection 'podman2' (5d5ba419-8b7f-4670-b0eb-b66c00c4f6b0)\nJul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6749] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external')\nJul 07 20:16:26 
managed-node1 NetworkManager[642]: [1751933786.6756] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external')\nJul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6771] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external')\nJul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.6775] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')\nJul 07 20:16:26 managed-node1 podman[79243]: 2025-07-07 20:16:26.585556736 -0400 EDT m=+0.025883528 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6\nJul 07 20:16:26 managed-node1 systemd[1]: Starting Network Manager Script Dispatcher Service...\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4613.\nJul 07 20:16:26 managed-node1 systemd[1]: Started Network Manager Script Dispatcher Service.\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4613.\nJul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.7052] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')\nJul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.7056] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external')\nJul 07 20:16:26 managed-node1 NetworkManager[642]: [1751933786.7066] device (podman2): Activation: successful, device activated.\nJul 07 20:16:26 managed-node1 systemd[1]: Started /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run.\n\u2591\u2591 Subject: A start job for unit run-r79a358c8821241c5893b4f6e7846886b.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit run-r79a358c8821241c5893b4f6e7846886b.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4679.\nJul 07 20:16:26 managed-node1 systemd[1]: Started /usr/bin/podman healthcheck run f33934011335e1e05e67f30d3833447877c8c3f24563f59e1979b85508143a46.\n\u2591\u2591 Subject: A start job for unit f33934011335e1e05e67f30d3833447877c8c3f24563f59e1979b85508143a46-7e0e2196b6bede63.timer has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit f33934011335e1e05e67f30d3833447877c8c3f24563f59e1979b85508143a46-7e0e2196b6bede63.timer has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4683.\nJul 07 20:16:26 managed-node1 podman[79243]: 2025-07-07 20:16:26.838601985 -0400 EDT m=+0.278928697 container init f33934011335e1e05e67f30d3833447877c8c3f24563f59e1979b85508143a46 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)\nJul 07 20:16:26 managed-node1 systemd[1]: 
Started quadlet-demo-mysql.service.\n\u2591\u2591 Subject: A start job for unit quadlet-demo-mysql.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit quadlet-demo-mysql.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4539.\nJul 07 20:16:26 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:26+00:00 [Note] [Entrypoint]: Entrypoint script for MySQL Server 5.6.51-1debian9 started.\nJul 07 20:16:26 managed-node1 podman[79243]: 2025-07-07 20:16:26.876531738 -0400 EDT m=+0.316858485 container start f33934011335e1e05e67f30d3833447877c8c3f24563f59e1979b85508143a46 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)\nJul 07 20:16:26 managed-node1 quadlet-demo-mysql[79243]: f33934011335e1e05e67f30d3833447877c8c3f24563f59e1979b85508143a46\nJul 07 20:16:26 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:26+00:00 [Note] [Entrypoint]: Switching to dedicated user 'mysql'\nJul 07 20:16:26 managed-node1 podman[79339]: 2025-07-07 20:16:26.969487306 -0400 EDT m=+0.081760512 container health_status f33934011335e1e05e67f30d3833447877c8c3f24563f59e1979b85508143a46 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)\nJul 07 20:16:26 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:26+00:00 [Note] [Entrypoint]: Entrypoint script for MySQL Server 5.6.51-1debian9 started.\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27+00:00 [Note] [Entrypoint]: Initializing database files\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 0 [Warning] TIMESTAMP with implicit DEFAULT value is deprecated. 
Please use --explicit_defaults_for_timestamp server option (see documentation for more details).\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 0 [Note] Ignoring --secure-file-priv value as server is running with --bootstrap.\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 0 [Note] /usr/sbin/mysqld (mysqld 5.6.51) starting as process 43 ...\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Using atomics to ref count buffer pool pages\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: The InnoDB memory heap is disabled\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Mutexes and rw_locks use GCC atomic builtins\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Memory barrier is not used\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Compressed tables use zlib 1.2.11\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Using Linux native AIO\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Using CPU crc32 instructions\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Initializing buffer pool, size = 128.0M\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Completed initialization of buffer pool\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: The first specified data file ./ibdata1 did not exist: a new database to be created!\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Setting file ./ibdata1 size to 12 MB\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Database physically writes the file full: wait...\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Setting log file ./ib_logfile101 size to 48 MB\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Setting log file ./ib_logfile1 size to 48 MB\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Renaming log file ./ib_logfile101 to ./ib_logfile0\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Warning] InnoDB: New log files created, LSN=45781\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Doublewrite buffer not found: creating new\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Doublewrite buffer created\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: 128 rollback segment(s) are active.\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Warning] InnoDB: Creating foreign key constraint system tables.\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Foreign key constraint system tables created\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Creating tablespace and datafile system tables.\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Tablespace and datafile 
system tables created.\nJul 07 20:16:27 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:27 43 [Note] InnoDB: Waiting for purge to start\nJul 07 20:16:27 managed-node1 python3.9[79551]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:16:28 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:28 43 [Note] InnoDB: 5.6.51 started; log sequence number 0\nJul 07 20:16:28 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:28 43 [Note] RSA private key file not found: /var/lib/mysql//private_key.pem. Some authentication plugins will not work.\nJul 07 20:16:28 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:28 43 [Note] RSA public key file not found: /var/lib/mysql//public_key.pem. Some authentication plugins will not work.\nJul 07 20:16:28 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:28 43 [Note] Binlog end\nJul 07 20:16:28 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:28 43 [Note] InnoDB: FTS optimize thread exiting.\nJul 07 20:16:28 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:28 43 [Note] InnoDB: Starting shutdown...\nJul 07 20:16:29 managed-node1 python3.9[79713]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 43 [Note] InnoDB: Shutdown completed; log sequence number 1625977\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 0 [Warning] TIMESTAMP with implicit DEFAULT value is deprecated. 
Please use --explicit_defaults_for_timestamp server option (see documentation for more details).\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 0 [Note] Ignoring --secure-file-priv value as server is running with --bootstrap.\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 0 [Note] /usr/sbin/mysqld (mysqld 5.6.51) starting as process 66 ...\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Using atomics to ref count buffer pool pages\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: The InnoDB memory heap is disabled\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Mutexes and rw_locks use GCC atomic builtins\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Memory barrier is not used\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Compressed tables use zlib 1.2.11\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Using Linux native AIO\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Using CPU crc32 instructions\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Initializing buffer pool, size = 128.0M\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Completed initialization of buffer pool\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Highest supported file format is Barracuda.\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: 128 rollback segment(s) are active.\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Waiting for purge to start\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: 5.6.51 started; log sequence number 1625977\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] RSA private key file not found: /var/lib/mysql//private_key.pem. Some authentication plugins will not work.\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] RSA public key file not found: /var/lib/mysql//public_key.pem. 
Some authentication plugins will not work.\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] Binlog end\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: FTS optimize thread exiting.\nJul 07 20:16:29 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:29 66 [Note] InnoDB: Starting shutdown...\nJul 07 20:16:30 managed-node1 python3.9[79886]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:16:30 managed-node1 python3.9[80006]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933789.8840272-19439-56658192497812/.source.yml dest=/etc/containers/systemd/envoy-proxy-configmap.yml owner=root group=0 mode=0644 _original_basename=envoy-proxy-configmap.yml follow=False checksum=d681c7d56f912150d041873e880818b22a90c188 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:16:31 managed-node1 python3.9[80155]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None\nJul 07 20:16:31 managed-node1 systemd[1]: Reloading.\nJul 07 20:16:31 managed-node1 quadlet-generator[80161]: Warning: bogus.container specifies the image \"this_is_a_bogus_image\" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.\nJul 07 20:16:31 managed-node1 systemd-rc-local-generator[80173]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 66 [Note] InnoDB: Shutdown completed; log sequence number 1625987\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: PLEASE REMEMBER TO SET A PASSWORD FOR THE MySQL root USER !\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: To do so, start the server, then issue the following commands:\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: /usr/bin/mysqladmin -u root password 'new-password'\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: /usr/bin/mysqladmin -u root -h f33934011335 password 'new-password'\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: Alternatively you can run:\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: /usr/bin/mysql_secure_installation\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: which will also give you the option of removing the test\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: databases and anonymous user created by default. 
This is\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: strongly recommended for production servers.\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: See the manual for more instructions.\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: Please report any problems at http://bugs.mysql.com/\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: The latest information about MySQL is available on the web at\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: http://www.mysql.com\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: Support MySQL by buying support/licenses at http://shop.mysql.com\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: Note: new default config file not created.\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: Please make sure your config file is current\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: WARNING: Default config file /etc/mysql/my.cnf exists on the system\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: This file will be read by default by the MySQL server\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: If you do not want to use this, either remove it, or use the\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: --defaults-file argument to mysqld_safe when starting the server\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31+00:00 [Note] [Entrypoint]: Database files initialized\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31+00:00 [Note] [Entrypoint]: Starting temporary server\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31+00:00 [Note] [Entrypoint]: Waiting for server startup\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 0 [Warning] TIMESTAMP with implicit DEFAULT value is deprecated. 
Please use --explicit_defaults_for_timestamp server option (see documentation for more details).\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 0 [Note] mysqld (mysqld 5.6.51) starting as process 91 ...\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] Plugin 'FEDERATED' is disabled.\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: Using atomics to ref count buffer pool pages\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: The InnoDB memory heap is disabled\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: Mutexes and rw_locks use GCC atomic builtins\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: Memory barrier is not used\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: Compressed tables use zlib 1.2.11\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: Using Linux native AIO\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: Using CPU crc32 instructions\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: Initializing buffer pool, size = 128.0M\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: Completed initialization of buffer pool\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: Highest supported file format is Barracuda.\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: 128 rollback segment(s) are active.\nJul 07 20:16:31 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:31 91 [Note] InnoDB: Waiting for purge to start\nJul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32 91 [Note] InnoDB: 5.6.51 started; log sequence number 1625987\nJul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32 91 [Warning] No existing UUID has been found, so we assume that this is the first time that this server has been started. Generating a new UUID: ccf1f618-5b90-11f0-9d45-165e203fac44.\nJul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32 91 [Note] RSA private key file not found: /var/lib/mysql//private_key.pem. Some authentication plugins will not work.\nJul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32 91 [Note] RSA public key file not found: /var/lib/mysql//public_key.pem. Some authentication plugins will not work.\nJul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32 91 [Warning] Insecure configuration for --pid-file: Location '/var/run/mysqld' in the path is accessible to all OS users. 
Consider choosing a different directory.\nJul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32 91 [Warning] 'user' entry 'root@f33934011335' ignored in --skip-name-resolve mode.\nJul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32 91 [Warning] 'user' entry '@f33934011335' ignored in --skip-name-resolve mode.\nJul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32 91 [Warning] 'proxies_priv' entry '@ root@f33934011335' ignored in --skip-name-resolve mode.\nJul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32 91 [Note] Event Scheduler: Loaded 0 events\nJul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32 91 [Note] mysqld: ready for connections.\nJul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: Version: '5.6.51' socket: '/var/run/mysqld/mysqld.sock' port: 0 MySQL Community Server (GPL)\nJul 07 20:16:32 managed-node1 python3.9[80366]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:16:32 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:32+00:00 [Note] [Entrypoint]: Temporary server started.\nJul 07 20:16:33 managed-node1 quadlet-demo-mysql[79332]: Warning: Unable to load '/usr/share/zoneinfo/iso3166.tab' as time zone. Skipping it.\nJul 07 20:16:33 managed-node1 quadlet-demo-mysql[79332]: Warning: Unable to load '/usr/share/zoneinfo/leap-seconds.list' as time zone. Skipping it.\nJul 07 20:16:33 managed-node1 quadlet-demo-mysql[79332]: Warning: Unable to load '/usr/share/zoneinfo/zone.tab' as time zone. Skipping it.\nJul 07 20:16:33 managed-node1 quadlet-demo-mysql[79332]: Warning: Unable to load '/usr/share/zoneinfo/zone1970.tab' as time zone. Skipping it.\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Warning] 'proxies_priv' entry '@ root@f33934011335' ignored in --skip-name-resolve mode.\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34+00:00 [Note] [Entrypoint]: Stopping temporary server\nJul 07 20:16:34 managed-node1 python3.9[80526]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] mysqld: Normal shutdown\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Giving 0 client threads a chance to die gracefully\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Event Scheduler: Purging the queue. 
0 events\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down slave threads\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Forcefully disconnecting 0 remaining clients\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Binlog end\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'partition'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'PERFORMANCE_SCHEMA'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_SYS_DATAFILES'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_SYS_TABLESPACES'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_SYS_FOREIGN_COLS'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_SYS_FOREIGN'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_SYS_FIELDS'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_SYS_COLUMNS'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_SYS_INDEXES'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_SYS_TABLESTATS'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_SYS_TABLES'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_FT_INDEX_TABLE'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_FT_INDEX_CACHE'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_FT_CONFIG'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_FT_BEING_DELETED'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_FT_DELETED'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_FT_DEFAULT_STOPWORD'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_METRICS'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_BUFFER_POOL_STATS'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_BUFFER_PAGE_LRU'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_BUFFER_PAGE'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_CMP_PER_INDEX_RESET'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_CMP_PER_INDEX'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_CMPMEM_RESET'\nJul 07 20:16:34 managed-node1 
quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_CMPMEM'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_CMP_RESET'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_CMP'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_LOCK_WAITS'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_LOCKS'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'INNODB_TRX'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] Shutting down plugin 'InnoDB'\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] InnoDB: FTS optimize thread exiting.\nJul 07 20:16:34 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:34 91 [Note] InnoDB: Starting shutdown...\nJul 07 20:16:34 managed-node1 python3.9[80682]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:16:34 managed-node1 python3.9[80802]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933794.3273475-19728-80933705754489/.source.yml _original_basename=.zg8_d8_6 follow=False checksum=998dccde0483b1654327a46ddd89cbaa47650370 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:16:35 managed-node1 python3.9[80951]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None\nJul 07 20:16:35 managed-node1 systemd[1]: Reloading.\nJul 07 20:16:35 managed-node1 quadlet-generator[80957]: Warning: bogus.container specifies the image \"this_is_a_bogus_image\" which not a fully qualified image name. This is not ideal for performance and security reasons. 
See the podman-pull manpage discussion of short-name-aliases.conf for details.\nJul 07 20:16:35 managed-node1 systemd-rc-local-generator[80969]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] InnoDB: Shutdown completed; log sequence number 1625997\nJul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] Shutting down plugin 'BLACKHOLE'\nJul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] Shutting down plugin 'ARCHIVE'\nJul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] Shutting down plugin 'MRG_MYISAM'\nJul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] Shutting down plugin 'MyISAM'\nJul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] Shutting down plugin 'MEMORY'\nJul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] Shutting down plugin 'CSV'\nJul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] Shutting down plugin 'sha256_password'\nJul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] Shutting down plugin 'mysql_old_password'\nJul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] Shutting down plugin 'mysql_native_password'\nJul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] Shutting down plugin 'binlog'\nJul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:35 91 [Note] mysqld: Shutdown complete\nJul 07 20:16:35 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36+00:00 [Note] [Entrypoint]: Temporary server stopped\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36+00:00 [Note] [Entrypoint]: MySQL init process done. Ready for start up.\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: \nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 0 [Warning] TIMESTAMP with implicit DEFAULT value is deprecated. 
Please use --explicit_defaults_for_timestamp server option (see documentation for more details).\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 0 [Note] mysqld (mysqld 5.6.51) starting as process 1 ...\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] Plugin 'FEDERATED' is disabled.\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: Using atomics to ref count buffer pool pages\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: The InnoDB memory heap is disabled\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: Mutexes and rw_locks use GCC atomic builtins\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: Memory barrier is not used\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: Compressed tables use zlib 1.2.11\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: Using Linux native AIO\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: Using CPU crc32 instructions\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: Initializing buffer pool, size = 128.0M\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: Completed initialization of buffer pool\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: Highest supported file format is Barracuda.\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: 128 rollback segment(s) are active.\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: Waiting for purge to start\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] InnoDB: 5.6.51 started; log sequence number 1625997\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] RSA private key file not found: /var/lib/mysql//private_key.pem. Some authentication plugins will not work.\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] RSA public key file not found: /var/lib/mysql//public_key.pem. Some authentication plugins will not work.\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] Server hostname (bind-address): '*'; port: 3306\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] IPv6 is available.\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] - '::' resolves to '::';\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] Server socket created on IP: '::'.\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Warning] Insecure configuration for --pid-file: Location '/var/run/mysqld' in the path is accessible to all OS users. 
Consider choosing a different directory.\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Warning] 'proxies_priv' entry '@ root@f33934011335' ignored in --skip-name-resolve mode.\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] Event Scheduler: Loaded 0 events\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: 2025-07-08 00:16:36 1 [Note] mysqld: ready for connections.\nJul 07 20:16:36 managed-node1 quadlet-demo-mysql[79332]: Version: '5.6.51' socket: '/var/run/mysqld/mysqld.sock' port: 3306 MySQL Community Server (GPL)\nJul 07 20:16:36 managed-node1 python3.9[81157]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:16:36 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 07 20:16:37 managed-node1 python3.9[81308]: ansible-slurp Invoked with path=/etc/containers/systemd/quadlet-demo.yml src=/etc/containers/systemd/quadlet-demo.yml\nJul 07 20:16:38 managed-node1 python3.9[81457]: ansible-file Invoked with path=/tmp/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:16:38 managed-node1 python3.9[81606]: ansible-file Invoked with path=/tmp/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:16:50 managed-node1 podman[81763]: 2025-07-07 20:16:50.494372081 -0400 EDT m=+11.011831413 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache\nJul 07 20:16:54 managed-node1 podman[82201]: 2025-07-07 20:16:54.405201763 -0400 EDT m=+3.424988203 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0\nJul 07 20:16:54 managed-node1 python3.9[82482]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:16:55 managed-node1 python3.9[82631]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:16:55 managed-node1 python3.9[82751]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933814.984563-20249-159053501176281/.source.kube dest=/etc/containers/systemd/quadlet-demo.kube owner=root group=0 mode=0644 
_original_basename=quadlet-demo.kube follow=False checksum=7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:16:56 managed-node1 python3.9[82900]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None\nJul 07 20:16:56 managed-node1 systemd[1]: Reloading.\nJul 07 20:16:56 managed-node1 quadlet-generator[82906]: Warning: bogus.container specifies the image \"this_is_a_bogus_image\" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.\nJul 07 20:16:56 managed-node1 systemd-rc-local-generator[82917]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:16:56 managed-node1 systemd[1]: Starting dnf makecache...\n\u2591\u2591 Subject: A start job for unit dnf-makecache.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit dnf-makecache.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4813.\nJul 07 20:16:56 managed-node1 dnf[82935]: Failed determining last makecache time.\nJul 07 20:16:56 managed-node1 dnf[82935]: Beaker Client - RedHatEnterpriseLinux9 26 kB/s | 1.5 kB 00:00\nJul 07 20:16:56 managed-node1 dnf[82935]: Beaker harness 186 kB/s | 1.3 kB 00:00\nJul 07 20:16:56 managed-node1 dnf[82935]: Copr repo for beakerlib-libraries owned by bgon 15 kB/s | 1.8 kB 00:00\nJul 07 20:16:56 managed-node1 dnf[82935]: CentOS Stream 9 - BaseOS 107 kB/s | 6.7 kB 00:00\nJul 07 20:16:57 managed-node1 python3.9[83087]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None\nJul 07 20:16:57 managed-node1 systemd[1]: Starting quadlet-demo.service...\n\u2591\u2591 Subject: A start job for unit quadlet-demo.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit quadlet-demo.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4943.\nJul 07 20:16:57 managed-node1 quadlet-demo[83099]: Pods stopped:\nJul 07 20:16:57 managed-node1 quadlet-demo[83099]: Pods removed:\nJul 07 20:16:57 managed-node1 quadlet-demo[83099]: Secrets removed:\nJul 07 20:16:57 managed-node1 quadlet-demo[83099]: Volumes removed:\nJul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.197476189 -0400 EDT m=+0.061646893 volume create wp-pv-claim\nJul 07 20:16:57 managed-node1 podman[83091]: 2025-07-07 20:16:57.207487227 -0400 EDT m=+0.103148810 container health_status f33934011335e1e05e67f30d3833447877c8c3f24563f59e1979b85508143a46 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)\nJul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.222721195 -0400 EDT m=+0.086891899 container create 5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042 (image=, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:16:57 managed-node1 podman[83099]: 
2025-07-07 20:16:57.232816142 -0400 EDT m=+0.096986817 volume create envoy-proxy-config\nJul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.239515168 -0400 EDT m=+0.103685845 volume create envoy-certificates\nJul 07 20:16:57 managed-node1 systemd[1]: Created slice cgroup machine-libpod_pod_b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5017.\nJul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.277595719 -0400 EDT m=+0.141766395 container create fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa (image=, name=b57dd77843de-infra, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.283498672 -0400 EDT m=+0.147669648 pod create b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f (image=, name=quadlet-demo)\nJul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.321988353 -0400 EDT m=+0.186159035 container create 8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:16:57 managed-node1 dnf[82935]: CentOS Stream 9 - AppStream 23 kB/s | 6.8 kB 00:00\nJul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.349118944 -0400 EDT m=+0.213289676 container create f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.349489551 -0400 EDT m=+0.213660343 container restart 5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042 (image=, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.291559491 -0400 EDT m=+0.155730291 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache\nJul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.32463886 -0400 EDT m=+0.188809721 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0\nJul 07 20:16:57 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5021.\nJul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.451993289 -0400 
EDT m=+0.316164217 container init 5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042 (image=, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.458750698 -0400 EDT m=+0.322921489 container start 5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042 (image=, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:16:57 managed-node1 kernel: podman2: port 2(veth1) entered blocking state\nJul 07 20:16:57 managed-node1 kernel: podman2: port 2(veth1) entered disabled state\nJul 07 20:16:57 managed-node1 kernel: veth1: entered allmulticast mode\nJul 07 20:16:57 managed-node1 kernel: veth1: entered promiscuous mode\nJul 07 20:16:57 managed-node1 kernel: podman2: port 2(veth1) entered blocking state\nJul 07 20:16:57 managed-node1 kernel: podman2: port 2(veth1) entered forwarding state\nJul 07 20:16:57 managed-node1 NetworkManager[642]: [1751933817.4941] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/11)\nJul 07 20:16:57 managed-node1 NetworkManager[642]: [1751933817.4968] device (veth1): carrier: link connected\nJul 07 20:16:57 managed-node1 systemd-udevd[83134]: Network interface NamePolicy= disabled on kernel command line.\nJul 07 20:16:57 managed-node1 dnf[82935]: CentOS Stream 9 - HighAvailability 99 kB/s | 7.1 kB 00:00\nJul 07 20:16:57 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5026.\nJul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.630340964 -0400 EDT m=+0.494511760 container init fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa (image=, name=b57dd77843de-infra, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.633530268 -0400 EDT m=+0.497701060 container start fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa (image=, name=b57dd77843de-infra, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:16:57 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5031.\nJul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.680011486 -0400 EDT m=+0.544182268 container init 8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 
20:16:57.684449747 -0400 EDT m=+0.548620577 container start 8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:16:57 managed-node1 quadlet-demo-wordpress[83203]: WordPress not found in /var/www/html - copying now...\nJul 07 20:16:57 managed-node1 dnf[82935]: CentOS Stream 9 - Extras packages 46 kB/s | 7.3 kB 00:00\nJul 07 20:16:57 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5036.\nJul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.786067539 -0400 EDT m=+0.650238481 container init f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.793179398 -0400 EDT m=+0.657350165 container start f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:16:57 managed-node1 podman[83099]: 2025-07-07 20:16:57.801305069 -0400 EDT m=+0.665475779 pod start b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f (image=, name=quadlet-demo)\nJul 07 20:16:57 managed-node1 systemd[1]: Started quadlet-demo.service.\n\u2591\u2591 Subject: A start job for unit quadlet-demo.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit quadlet-demo.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4943.\nJul 07 20:16:57 managed-node1 quadlet-demo[83099]: Volumes:\nJul 07 20:16:57 managed-node1 quadlet-demo[83099]: wp-pv-claim\nJul 07 20:16:57 managed-node1 quadlet-demo[83099]: Pod:\nJul 07 20:16:57 managed-node1 quadlet-demo[83099]: b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f\nJul 07 20:16:57 managed-node1 quadlet-demo[83099]: Containers:\nJul 07 20:16:57 managed-node1 quadlet-demo[83099]: 8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5\nJul 07 20:16:57 managed-node1 quadlet-demo[83099]: f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:404] initializing epoch 0 (base id=0, hot restart version=11.104)\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:406] statically linked extensions:\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.stats_sinks: envoy.dog_statsd, 
envoy.graphite_statsd, envoy.metrics_service, envoy.stat_sinks.dog_statsd, envoy.stat_sinks.graphite_statsd, envoy.stat_sinks.hystrix, envoy.stat_sinks.metrics_service, envoy.stat_sinks.statsd, envoy.stat_sinks.wasm, envoy.statsd\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.tls.cert_validator: envoy.tls.cert_validator.default, envoy.tls.cert_validator.spiffe\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.listener_manager_impl: envoy.listener_manager_impl.default\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.matching.http.custom_matchers: envoy.matching.custom_matchers.trie_matcher\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.filters.network: envoy.echo, envoy.ext_authz, envoy.filters.network.connection_limit, envoy.filters.network.direct_response, envoy.filters.network.dubbo_proxy, envoy.filters.network.echo, envoy.filters.network.ext_authz, envoy.filters.network.http_connection_manager, envoy.filters.network.local_ratelimit, envoy.filters.network.mongo_proxy, envoy.filters.network.ratelimit, envoy.filters.network.rbac, envoy.filters.network.redis_proxy, envoy.filters.network.sni_cluster, envoy.filters.network.sni_dynamic_forward_proxy, envoy.filters.network.tcp_proxy, envoy.filters.network.thrift_proxy, envoy.filters.network.wasm, envoy.filters.network.zookeeper_proxy, envoy.http_connection_manager, envoy.mongo_proxy, envoy.ratelimit, envoy.redis_proxy, envoy.tcp_proxy\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.upstreams: envoy.filters.connection_pools.tcp.generic\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.access_loggers.extension_filters: envoy.access_loggers.extension_filters.cel\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.filters.http.upstream: envoy.buffer, envoy.filters.http.admission_control, envoy.filters.http.buffer, envoy.filters.http.upstream_codec\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.http.stateful_header_formatters: envoy.http.stateful_header_formatters.preserve_case, preserve_case\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.dubbo_proxy.protocols: dubbo\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.quic.server.crypto_stream: envoy.quic.crypto_stream.server.quiche\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.route.early_data_policy: envoy.route.early_data_policy.default\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.guarddog_actions: envoy.watchdog.abort_action, envoy.watchdog.profile_action\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] 
[source/server/server.cc:408] envoy.config.validators: envoy.config.validators.minimum_clusters, envoy.config.validators.minimum_clusters_validator\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.matching.http.input: envoy.matching.inputs.destination_ip, envoy.matching.inputs.destination_port, envoy.matching.inputs.direct_source_ip, envoy.matching.inputs.dns_san, envoy.matching.inputs.request_headers, envoy.matching.inputs.request_trailers, envoy.matching.inputs.response_headers, envoy.matching.inputs.response_trailers, envoy.matching.inputs.server_name, envoy.matching.inputs.source_ip, envoy.matching.inputs.source_port, envoy.matching.inputs.source_type, envoy.matching.inputs.status_code_class_input, envoy.matching.inputs.status_code_input, envoy.matching.inputs.subject, envoy.matching.inputs.uri_san\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.848][1][info][main] [source/server/server.cc:408] envoy.quic.connection_id_generator: envoy.quic.deterministic_connection_id_generator\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.filters.listener: envoy.filters.listener.http_inspector, envoy.filters.listener.original_dst, envoy.filters.listener.original_src, envoy.filters.listener.proxy_protocol, envoy.filters.listener.tls_inspector, envoy.listener.http_inspector, envoy.listener.original_dst, envoy.listener.original_src, envoy.listener.proxy_protocol, envoy.listener.tls_inspector\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.filters.udp_listener: envoy.filters.udp.dns_filter, envoy.filters.udp_listener.udp_proxy\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.rate_limit_descriptors: envoy.rate_limit_descriptors.expr\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.path.match: envoy.path.match.uri_template.uri_template_matcher\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.thrift_proxy.transports: auto, framed, header, unframed\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.request_id: envoy.request_id.uuid\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.http.stateful_session: envoy.http.stateful_session.cookie, envoy.http.stateful_session.header\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.matching.network.custom_matchers: envoy.matching.custom_matchers.trie_matcher\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.load_balancing_policies: envoy.load_balancing_policies.least_request, envoy.load_balancing_policies.random, envoy.load_balancing_policies.round_robin\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.http.early_header_mutation: envoy.http.early_header_mutation.header_mutation\nJul 07 20:16:57 managed-node1 
quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.udp_packet_writer: envoy.udp_packet_writer.default, envoy.udp_packet_writer.gso\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.resource_monitors: envoy.resource_monitors.fixed_heap, envoy.resource_monitors.injected_resource\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.network.dns_resolver: envoy.network.dns_resolver.cares, envoy.network.dns_resolver.getaddrinfo\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.health_checkers: envoy.health_checkers.redis, envoy.health_checkers.thrift\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] quic.http_server_connection: quic.http_server_connection.default\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.formatter: envoy.formatter.metadata, envoy.formatter.req_without_query\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.regex_engines: envoy.regex_engines.google_re2\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.matching.input_matchers: envoy.matching.matchers.consistent_hashing, envoy.matching.matchers.ip\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.849][1][info][main] [source/server/server.cc:408] envoy.compression.decompressor: envoy.compression.brotli.decompressor, envoy.compression.gzip.decompressor, envoy.compression.zstd.decompressor\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.853][1][info][main] [source/server/server.cc:408] envoy.upstream_options: envoy.extensions.upstreams.http.v3.HttpProtocolOptions, envoy.extensions.upstreams.tcp.v3.TcpProtocolOptions, envoy.upstreams.http.http_protocol_options, envoy.upstreams.tcp.tcp_protocol_options\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.853][1][info][main] [source/server/server.cc:408] envoy.resolvers: envoy.ip\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.853][1][info][main] [source/server/server.cc:408] envoy.path.rewrite: envoy.path.rewrite.uri_template.uri_template_rewriter\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.853][1][info][main] [source/server/server.cc:408] envoy.bootstrap: envoy.bootstrap.internal_listener, envoy.bootstrap.wasm, envoy.extensions.network.socket_interface.default_socket_interface\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.854][1][info][main] [source/server/server.cc:408] envoy.transport_sockets.downstream: envoy.transport_sockets.alts, envoy.transport_sockets.quic, envoy.transport_sockets.raw_buffer, envoy.transport_sockets.starttls, envoy.transport_sockets.tap, envoy.transport_sockets.tcp_stats, envoy.transport_sockets.tls, raw_buffer, starttls, tls\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.854][1][info][main] [source/server/server.cc:408] envoy.dubbo_proxy.serializers: dubbo.hessian2\nJul 07 20:16:57 managed-node1 
quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.854][1][info][main] [source/server/server.cc:408] envoy.matching.action: envoy.matching.actions.format_string, filter-chain-name\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.854][1][info][main] [source/server/server.cc:408] envoy.grpc_credentials: envoy.grpc_credentials.aws_iam, envoy.grpc_credentials.default, envoy.grpc_credentials.file_based_metadata\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.854][1][info][main] [source/server/server.cc:408] envoy.retry_priorities: envoy.retry_priorities.previous_priorities\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.855][1][info][main] [source/server/server.cc:408] envoy.filters.http: envoy.bandwidth_limit, envoy.buffer, envoy.cors, envoy.csrf, envoy.ext_authz, envoy.ext_proc, envoy.fault, envoy.filters.http.adaptive_concurrency, envoy.filters.http.admission_control, envoy.filters.http.alternate_protocols_cache, envoy.filters.http.aws_lambda, envoy.filters.http.aws_request_signing, envoy.filters.http.bandwidth_limit, envoy.filters.http.buffer, envoy.filters.http.cache, envoy.filters.http.cdn_loop, envoy.filters.http.composite, envoy.filters.http.compressor, envoy.filters.http.cors, envoy.filters.http.csrf, envoy.filters.http.custom_response, envoy.filters.http.decompressor, envoy.filters.http.dynamic_forward_proxy, envoy.filters.http.ext_authz, envoy.filters.http.ext_proc, envoy.filters.http.fault, envoy.filters.http.file_system_buffer, envoy.filters.http.gcp_authn, envoy.filters.http.grpc_http1_bridge, envoy.filters.http.grpc_http1_reverse_bridge, envoy.filters.http.grpc_json_transcoder, envoy.filters.http.grpc_stats, envoy.filters.http.grpc_web, envoy.filters.http.header_to_metadata, envoy.filters.http.health_check, envoy.filters.http.ip_tagging, envoy.filters.http.jwt_authn, envoy.filters.http.local_ratelimit, envoy.filters.http.lua, envoy.filters.http.match_delegate, envoy.filters.http.oauth2, envoy.filters.http.on_demand, envoy.filters.http.original_src, envoy.filters.http.rate_limit_quota, envoy.filters.http.ratelimit, envoy.filters.http.rbac, envoy.filters.http.router, envoy.filters.http.set_metadata, envoy.filters.http.stateful_session, envoy.filters.http.tap, envoy.filters.http.wasm, envoy.grpc_http1_bridge, envoy.grpc_json_transcoder, envoy.grpc_web, envoy.health_check, envoy.ip_tagging, envoy.local_rate_limit, envoy.lua, envoy.rate_limit, envoy.router\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.retry_host_predicates: envoy.retry_host_predicates.omit_canary_hosts, envoy.retry_host_predicates.omit_host_metadata, envoy.retry_host_predicates.previous_hosts\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.http.cache: envoy.extensions.http.cache.file_system_http_cache, envoy.extensions.http.cache.simple\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.wasm.runtime: envoy.wasm.runtime.null, envoy.wasm.runtime.v8\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.common.key_value: envoy.key_value.file_based\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.quic.proof_source: 
envoy.quic.proof_source.filter_chain\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.access_loggers: envoy.access_loggers.file, envoy.access_loggers.http_grpc, envoy.access_loggers.open_telemetry, envoy.access_loggers.stderr, envoy.access_loggers.stdout, envoy.access_loggers.tcp_grpc, envoy.access_loggers.wasm, envoy.file_access_log, envoy.http_grpc_access_log, envoy.open_telemetry_access_log, envoy.stderr_access_log, envoy.stdout_access_log, envoy.tcp_grpc_access_log, envoy.wasm_access_log\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.http.original_ip_detection: envoy.http.original_ip_detection.custom_header, envoy.http.original_ip_detection.xff\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.internal_redirect_predicates: envoy.internal_redirect_predicates.allow_listed_routes, envoy.internal_redirect_predicates.previous_routes, envoy.internal_redirect_predicates.safe_cross_scheme\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.connection_handler: envoy.connection_handler.default\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.thrift_proxy.protocols: auto, binary, binary/non-strict, compact, twitter\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.http.header_validators: envoy.http.header_validators.envoy_default\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.matching.network.input: envoy.matching.inputs.application_protocol, envoy.matching.inputs.destination_ip, envoy.matching.inputs.destination_port, envoy.matching.inputs.direct_source_ip, envoy.matching.inputs.dns_san, envoy.matching.inputs.server_name, envoy.matching.inputs.source_ip, envoy.matching.inputs.source_port, envoy.matching.inputs.source_type, envoy.matching.inputs.subject, envoy.matching.inputs.transport_protocol, envoy.matching.inputs.uri_san\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.matching.common_inputs: envoy.matching.common_inputs.environment_variable\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] network.connection.client: default, envoy_internal\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.tracers: envoy.dynamic.ot, envoy.tracers.datadog, envoy.tracers.dynamic_ot, envoy.tracers.opencensus, envoy.tracers.opentelemetry, envoy.tracers.skywalking, envoy.tracers.xray, envoy.tracers.zipkin, envoy.zipkin\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.transport_sockets.upstream: envoy.transport_sockets.alts, envoy.transport_sockets.http_11_proxy, envoy.transport_sockets.internal_upstream, envoy.transport_sockets.quic, envoy.transport_sockets.raw_buffer, envoy.transport_sockets.starttls, envoy.transport_sockets.tap, envoy.transport_sockets.tcp_stats, envoy.transport_sockets.tls, 
envoy.transport_sockets.upstream_proxy_protocol, raw_buffer, starttls, tls\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.thrift_proxy.filters: envoy.filters.thrift.header_to_metadata, envoy.filters.thrift.payload_to_metadata, envoy.filters.thrift.rate_limit, envoy.filters.thrift.router\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.compression.compressor: envoy.compression.brotli.compressor, envoy.compression.gzip.compressor, envoy.compression.zstd.compressor\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.clusters: envoy.cluster.eds, envoy.cluster.logical_dns, envoy.cluster.original_dst, envoy.cluster.static, envoy.cluster.strict_dns, envoy.clusters.aggregate, envoy.clusters.dynamic_forward_proxy, envoy.clusters.redis\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.http.custom_response: envoy.extensions.http.custom_response.local_response_policy, envoy.extensions.http.custom_response.redirect_policy\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.dubbo_proxy.filters: envoy.filters.dubbo.router\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.856][1][info][main] [source/server/server.cc:408] envoy.rbac.matchers: envoy.rbac.matchers.upstream_ip_port\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.862][1][info][main] [source/server/server.cc:456] HTTP header map info:\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.864][1][info][main] [source/server/server.cc:459] request header map: 672 bytes: :authority,:method,:path,:protocol,:scheme,accept,accept-encoding,access-control-request-headers,access-control-request-method,access-control-request-private-network,authentication,authorization,cache-control,cdn-loop,connection,content-encoding,content-length,content-type,expect,grpc-accept-encoding,grpc-timeout,if-match,if-modified-since,if-none-match,if-range,if-unmodified-since,keep-alive,origin,pragma,proxy-connection,proxy-status,referer,te,transfer-encoding,upgrade,user-agent,via,x-client-trace-id,x-envoy-attempt-count,x-envoy-decorator-operation,x-envoy-downstream-service-cluster,x-envoy-downstream-service-node,x-envoy-expected-rq-timeout-ms,x-envoy-external-address,x-envoy-force-trace,x-envoy-hedge-on-per-try-timeout,x-envoy-internal,x-envoy-ip-tags,x-envoy-is-timeout-retry,x-envoy-max-retries,x-envoy-original-path,x-envoy-original-url,x-envoy-retriable-header-names,x-envoy-retriable-status-codes,x-envoy-retry-grpc-on,x-envoy-retry-on,x-envoy-upstream-alt-stat-name,x-envoy-upstream-rq-per-try-timeout-ms,x-envoy-upstream-rq-timeout-alt-response,x-envoy-upstream-rq-timeout-ms,x-envoy-upstream-stream-duration-ms,x-forwarded-client-cert,x-forwarded-for,x-forwarded-host,x-forwarded-port,x-forwarded-proto,x-ot-span-context,x-request-id\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.864][1][info][main] [source/server/server.cc:459] request trailer map: 120 bytes: \nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.864][1][info][main] [source/server/server.cc:459] response header map: 432 bytes: 
:status,access-control-allow-credentials,access-control-allow-headers,access-control-allow-methods,access-control-allow-origin,access-control-allow-private-network,access-control-expose-headers,access-control-max-age,age,cache-control,connection,content-encoding,content-length,content-type,date,etag,expires,grpc-message,grpc-status,keep-alive,last-modified,location,proxy-connection,proxy-status,server,transfer-encoding,upgrade,vary,via,x-envoy-attempt-count,x-envoy-decorator-operation,x-envoy-degraded,x-envoy-immediate-health-check-fail,x-envoy-ratelimited,x-envoy-upstream-canary,x-envoy-upstream-healthchecked-cluster,x-envoy-upstream-service-time,x-request-id\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.864][1][info][main] [source/server/server.cc:459] response trailer map: 144 bytes: grpc-message,grpc-status\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.870][1][info][main] [source/server/server.cc:819] runtime: {}\nJul 07 20:16:57 managed-node1 dnf[82935]: Extra Packages for Enterprise Linux 9 - x86_64 235 kB/s | 34 kB 00:00\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.887][1][info][admin] [source/server/admin/admin.cc:67] admin address: 0.0.0.0:9901\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.888][1][info][config] [source/server/configuration_impl.cc:131] loading tracing configuration\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.888][1][info][config] [source/server/configuration_impl.cc:91] loading 0 static secret(s)\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.888][1][info][config] [source/server/configuration_impl.cc:97] loading 1 cluster(s)\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.902][1][info][config] [source/server/configuration_impl.cc:101] loading 1 listener(s)\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.918][1][info][config] [source/server/configuration_impl.cc:113] loading stats configuration\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.918][1][info][runtime] [source/common/runtime/runtime_impl.cc:463] RTDS has finished initialization\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.919][1][info][upstream] [source/common/upstream/cluster_manager_impl.cc:226] cm init: all clusters initialized\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.919][1][warning][main] [source/server/server.cc:794] there is no configured limit to the number of allowed active connections. Set a limit via the runtime key overload.global_downstream_max_connections\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.922][1][info][main] [source/server/server.cc:896] all clusters initialized. initializing init manager\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.922][1][info][config] [source/extensions/listener_managers/listener_manager/listener_manager_impl.cc:852] all dependencies initialized. starting workers\nJul 07 20:16:57 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:16:57.923][1][info][main] [source/server/server.cc:915] starting main dispatch loop\nJul 07 20:16:57 managed-node1 quadlet-demo-wordpress[83203]: Complete! 
WordPress has been successfully copied to /var/www/html\nJul 07 20:16:58 managed-node1 python3.9[83469]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/containers/systemd _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:16:59 managed-node1 dnf[82935]: Extra Packages for Enterprise Linux 9 - x86_64 16 MB/s | 20 MB 00:01\nJul 07 20:16:59 managed-node1 quadlet-demo-wordpress[83203]: AH00558: apache2: Could not reliably determine the server's fully qualified domain name, using 192.168.30.3. Set the 'ServerName' directive globally to suppress this message\nJul 07 20:16:59 managed-node1 quadlet-demo-wordpress[83203]: AH00558: apache2: Could not reliably determine the server's fully qualified domain name, using 192.168.30.3. Set the 'ServerName' directive globally to suppress this message\nJul 07 20:16:59 managed-node1 quadlet-demo-wordpress[83203]: [Tue Jul 08 00:16:59.357593 2025] [mpm_prefork:notice] [pid 1] AH00163: Apache/2.4.10 (Debian) PHP/5.6.32 configured -- resuming normal operations\nJul 07 20:16:59 managed-node1 quadlet-demo-wordpress[83203]: [Tue Jul 08 00:16:59.360135 2025] [core:notice] [pid 1] AH00094: Command line: 'apache2 -D FOREGROUND'\nJul 07 20:16:59 managed-node1 python3.9[83699]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:16:59 managed-node1 dnf[82935]: Extra Packages for Enterprise Linux 9 - x86_64 69 kB/s | 21 kB 00:00\nJul 07 20:16:59 managed-node1 python3.9[83873]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:17:00 managed-node1 dnf[82935]: Extra Packages for Enterprise Linux 9 - x86_64 229 kB/s | 23 kB 00:00\nJul 07 20:17:00 managed-node1 dnf[82935]: Extra Packages for Enterprise Linux 9 - x86_64 14 MB/s | 4.0 MB 00:00\nJul 07 20:17:00 managed-node1 python3.9[84035]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:17:00 managed-node1 python3.9[84192]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:17:01 managed-node1 python3.9[84344]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:17:01 managed-node1 quadlet-demo-wordpress[83203]: 127.0.0.1 - - [08/Jul/2025:00:17:01 +0000] \"GET / HTTP/1.1\" 302 
324 \"-\" \"ansible-httpget\"\nJul 07 20:17:02 managed-node1 quadlet-demo-wordpress[83203]: 127.0.0.1 - - [08/Jul/2025:00:17:01 +0000] \"GET /wp-admin/install.php HTTP/1.1\" 200 11984 \"-\" \"ansible-httpget\"\nJul 07 20:17:03 managed-node1 python3.9[84495]: ansible-ansible.legacy.command Invoked with _raw_params=cat /run/out _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:17:03 managed-node1 python3.9[84645]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:17:03 managed-node1 dnf[82935]: Extra Packages for Enterprise Linux 9 openh264 6.0 kB/s | 993 B 00:00\nJul 07 20:17:03 managed-node1 dnf[82935]: Copr repo for qa-tools owned by lpol 32 kB/s | 1.8 kB 00:00\nJul 07 20:17:04 managed-node1 python3.9[84804]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:17:04 managed-node1 python3.9[84961]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units --all | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:17:05 managed-node1 python3.9[85113]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/systemd/system _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:17:06 managed-node1 dnf[82935]: Metadata cache created.\nJul 07 20:17:06 managed-node1 systemd[1]: dnf-makecache.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit dnf-makecache.service has successfully entered the 'dead' state.\nJul 07 20:17:06 managed-node1 systemd[1]: Finished dnf makecache.\n\u2591\u2591 Subject: A start job for unit dnf-makecache.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit dnf-makecache.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4813.\nJul 07 20:17:06 managed-node1 systemd[1]: dnf-makecache.service: Consumed 7.664s CPU time.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit dnf-makecache.service completed and consumed the indicated resources.\nJul 07 20:17:07 managed-node1 python3.9[85412]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:17:08 managed-node1 python3.9[85567]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 
07 20:17:11 managed-node1 python3.9[85718]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nJul 07 20:17:14 managed-node1 python3.9[85868]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None\nJul 07 20:17:15 managed-node1 python3.9[86019]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None\nJul 07 20:17:15 managed-node1 python3.9[86170]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 07 20:17:16 managed-node1 python3.9[86319]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 07 20:17:18 managed-node1 podman[86469]: 2025-07-07 20:17:18.711105559 -0400 EDT m=+0.019785469 secret remove ea8c34f75eeeee13a33c9ef0e\nJul 07 20:17:19 managed-node1 podman[86626]: 2025-07-07 20:17:19.995189735 -0400 EDT m=+0.018739940 secret remove cb2d4c65044c524adeead96e5\nJul 07 20:17:21 managed-node1 podman[86782]: 2025-07-07 20:17:21.222445829 -0400 EDT m=+0.020468808 secret remove a95acde29e8944984a9c6b05c\nJul 07 20:17:22 managed-node1 python3.9[86938]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:17:23 managed-node1 python3.9[87089]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None\nJul 07 20:17:23 managed-node1 systemd[1]: Reloading.\nJul 07 20:17:23 managed-node1 quadlet-generator[87097]: Warning: bogus.container specifies the image \"this_is_a_bogus_image\" which not a fully qualified image name. This is not ideal for performance and security reasons. 
See the podman-pull manpage discussion of short-name-aliases.conf for details.\nJul 07 20:17:23 managed-node1 systemd-rc-local-generator[87109]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:17:23 managed-node1 systemd[1]: Stopping quadlet-demo.service...\n\u2591\u2591 Subject: A stop job for unit quadlet-demo.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit quadlet-demo.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5041.\nJul 07 20:17:23 managed-node1 systemd[1]: libpod-5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042.scope has successfully entered the 'dead' state.\nJul 07 20:17:23 managed-node1 podman[87128]: 2025-07-07 20:17:23.586690926 -0400 EDT m=+0.025121505 container died 5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042 (image=, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:17:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 07 20:17:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:17:24 managed-node1 podman[87128]: 2025-07-07 20:17:24.598463702 -0400 EDT m=+1.036894323 container cleanup 5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042 (image=, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:17:24 managed-node1 podman[87138]: 2025-07-07 20:17:24.64198547 -0400 EDT m=+0.026141643 pod stop b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f (image=, name=quadlet-demo)\nJul 07 20:17:24 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:17:24.669][1][warning][main] [source/server/server.cc:854] caught ENVOY_SIGTERM\nJul 07 20:17:24 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:17:24.670][1][info][main] [source/server/server.cc:985] shutting down server instance\nJul 07 20:17:24 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:17:24.670][1][info][main] [source/server/server.cc:920] main dispatch loop exited\nJul 07 20:17:24 managed-node1 quadlet-demo-envoy[83212]: [2025-07-08 00:17:24.670][1][info][main] [source/server/server.cc:972] exiting\nJul 07 20:17:24 managed-node1 systemd[1]: 
libpod-f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad.scope has successfully entered the 'dead' state.\nJul 07 20:17:24 managed-node1 podman[87138]: 2025-07-07 20:17:24.680641899 -0400 EDT m=+0.064798159 container died f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:17:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay-385b8e73dfdfcde23ee0e5b8bf415127191cfc5d4323574c93bc4e97979eab2d-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-385b8e73dfdfcde23ee0e5b8bf415127191cfc5d4323574c93bc4e97979eab2d-merged.mount has successfully entered the 'dead' state.\nJul 07 20:17:24 managed-node1 podman[87138]: 2025-07-07 20:17:24.713081174 -0400 EDT m=+0.097237436 container cleanup f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:17:24 managed-node1 quadlet-demo-wordpress[83203]: [Tue Jul 08 00:17:24.732426 2025] [mpm_prefork:notice] [pid 1] AH00169: caught SIGTERM, shutting down\nJul 07 20:17:24 managed-node1 systemd[1]: libpod-8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5.scope has successfully entered the 'dead' state.\nJul 07 20:17:24 managed-node1 systemd[1]: libpod-8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5.scope: Consumed 1.319s CPU time.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5.scope completed and consumed the indicated resources.\nJul 07 20:17:24 managed-node1 podman[87138]: 2025-07-07 20:17:24.751824722 -0400 EDT m=+0.135980934 container died 8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:17:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay-976835557e70125fb8b0fbe2f888b9f69052ecf805471614a74eec815f5ce212-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-976835557e70125fb8b0fbe2f888b9f69052ecf805471614a74eec815f5ce212-merged.mount has successfully entered the 'dead' state.\nJul 07 20:17:24 managed-node1 podman[87138]: 2025-07-07 20:17:24.78485353 -0400 EDT m=+0.169009633 
container cleanup 8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:17:24 managed-node1 systemd[1]: libpod-fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa.scope has successfully entered the 'dead' state.\nJul 07 20:17:24 managed-node1 podman[87138]: 2025-07-07 20:17:24.803343794 -0400 EDT m=+0.187500269 container died fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa (image=, name=b57dd77843de-infra, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:17:24 managed-node1 kernel: podman2: port 2(veth1) entered disabled state\nJul 07 20:17:24 managed-node1 kernel: veth1 (unregistering): left allmulticast mode\nJul 07 20:17:24 managed-node1 kernel: veth1 (unregistering): left promiscuous mode\nJul 07 20:17:24 managed-node1 kernel: podman2: port 2(veth1) entered disabled state\nJul 07 20:17:24 managed-node1 systemd[1]: run-netns-netns\\x2dad524ec0\\x2da4a3\\x2d4520\\x2d8e2f\\x2dc0b64ccd5fc1.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-netns-netns\\x2dad524ec0\\x2da4a3\\x2d4520\\x2d8e2f\\x2dc0b64ccd5fc1.mount has successfully entered the 'dead' state.\nJul 07 20:17:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 07 20:17:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:17:24 managed-node1 podman[87138]: 2025-07-07 20:17:24.954632715 -0400 EDT m=+0.338788816 container cleanup fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa (image=, name=b57dd77843de-infra, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:17:24 managed-node1 systemd[1]: Removed slice cgroup machine-libpod_pod_b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f.slice.\n\u2591\u2591 Subject: A stop job for unit machine-libpod_pod_b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit machine-libpod_pod_b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f.slice has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5043 and the job result is done.\nJul 07 20:17:24 managed-node1 systemd[1]: machine-libpod_pod_b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f.slice: Consumed 1.428s CPU time.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit machine-libpod_pod_b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f.slice completed and consumed the indicated resources.\nJul 07 20:17:24 managed-node1 podman[87138]: 2025-07-07 20:17:24.980101642 -0400 EDT m=+0.364257774 container remove 8f4b73cb87b95bb27844fa2d580c22f2a52dbe1269c081ad4569f690067512d5 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:17:24 managed-node1 podman[87138]: 2025-07-07 20:17:24.984329832 -0400 EDT m=+0.368485981 container remove f5dcbbf00e552991b3b6792f8c3d2f6deb07fe4db47e3053c9d8e7efd6db9bad (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:17:25 managed-node1 podman[87138]: 2025-07-07 20:17:25.000781891 -0400 EDT m=+0.384938028 container remove fc833924e791c4ee272f6be670b149ece932ff4ca96fbcb5f3c520b1003983aa (image=, name=b57dd77843de-infra, pod_id=b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:17:25 managed-node1 systemd[1]: machine-libpod_pod_b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f.slice: Failed to open /run/systemd/transient/machine-libpod_pod_b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f.slice: No such file or directory\nJul 07 20:17:25 managed-node1 podman[87138]: 2025-07-07 20:17:25.007117903 -0400 EDT m=+0.391274000 pod remove b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f (image=, name=quadlet-demo)\nJul 07 20:17:25 managed-node1 podman[87138]: 2025-07-07 20:17:25.024618836 -0400 EDT m=+0.408775208 container remove 5b34c5fc3265b3794a0ef5b2429311203d28c799e8625edce74ae36aee5c2042 (image=, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)\nJul 07 20:17:25 managed-node1 quadlet-demo[87138]: Pods stopped:\nJul 07 20:17:25 managed-node1 quadlet-demo[87138]: b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f\nJul 07 20:17:25 managed-node1 quadlet-demo[87138]: Pods removed:\nJul 07 20:17:25 managed-node1 quadlet-demo[87138]: b57dd77843dedc0c93d38e8e96bb998e6ba922e902111f2d94b0f5f250370f5f\nJul 07 20:17:25 managed-node1 quadlet-demo[87138]: Secrets removed:\nJul 07 20:17:25 managed-node1 quadlet-demo[87138]: Volumes removed:\nJul 07 20:17:25 managed-node1 systemd[1]: quadlet-demo.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit quadlet-demo.service has successfully entered the 'dead' state.\nJul 07 20:17:25 managed-node1 systemd[1]: Stopped quadlet-demo.service.\n\u2591\u2591 Subject: A stop job for unit 
quadlet-demo.service has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit quadlet-demo.service has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5041 and the job result is done.\nJul 07 20:17:25 managed-node1 python3.9[87347]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:17:26 managed-node1 python3.9[87647]: ansible-ansible.legacy.command Invoked with _raw_params=exec 1>&2\n set -x\n set -o pipefail\n systemctl list-units --plain -l --all | grep quadlet || :\n systemctl list-unit-files --all | grep quadlet || :\n systemctl list-units --plain --failed -l --all | grep quadlet || :\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:17:27 managed-node1 python3.9[87803]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None",
"task_name": "Get journald",
"task_path": "/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:217"
}
]
SYSTEM ROLES ERRORS END v1
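(The journal dump above was collected by the test itself. To reproduce the same diagnostics by hand on the managed node, the commands already invoked in the log can be re-run directly; a minimal sketch, assuming root access on managed-node1:

    # Quadlet unit files the role wrote for systemd
    ls -alrtF /etc/containers/systemd
    # Containers, pods, and volumes as podman sees them
    podman ps -a
    podman pod ps --ctr-ids --ctr-names --ctr-status
    podman volume ls
    # systemd's view of the quadlet-generated units
    systemctl list-units --plain -l --all | grep quadlet || :
    systemctl list-unit-files --all | grep quadlet || :
    # Recent journal entries with explanatory catalog text
    journalctl -ex
)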
TASKS RECAP ********************************************************************
Monday 07 July 2025 20:17:27 -0400 (0:00:00.410) 0:01:48.422 ***********
===============================================================================
fedora.linux_system_roles.podman : Ensure container images are present -- 15.44s
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Ensure container images are present --- 6.67s
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.firewall : Install firewalld ------------------ 3.19s
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:51
fedora.linux_system_roles.podman : Stop and disable service ------------- 2.22s
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Check web --------------------------------------------------------------- 1.62s
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121
fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed --- 1.42s
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5
fedora.linux_system_roles.podman : Start service ------------------------ 1.37s
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115
fedora.linux_system_roles.certificate : Ensure provider packages are installed --- 1.33s
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:15
fedora.linux_system_roles.firewall : Install firewalld ------------------ 1.31s
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:51
fedora.linux_system_roles.firewall : Configure firewall ----------------- 1.28s
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:74
fedora.linux_system_roles.certificate : Slurp the contents of the files --- 1.18s
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:143
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.16s
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.certificate : Remove files -------------------- 1.05s
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:174
Gathering Facts --------------------------------------------------------- 1.03s
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9
fedora.linux_system_roles.firewall : Configure firewall ----------------- 1.03s
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:74
fedora.linux_system_roles.firewall : Enable and start firewalld service --- 1.01s
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:30
fedora.linux_system_roles.podman : Gather the package facts ------------- 0.99s
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.certificate : Ensure certificate requests ----- 0.97s
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:86
fedora.linux_system_roles.podman : Start service ------------------------ 0.87s
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115
fedora.linux_system_roles.podman : Create host directories -------------- 0.81s
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7