ansible-playbook 2.9.27
  config file = /etc/ansible/ansible.cfg
  configured module search path = [u'/root/.ansible/plugins/modules', u'/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/lib/python2.7/site-packages/ansible
  executable location = /usr/bin/ansible-playbook
  python version = 2.7.5 (default, Nov 14 2023, 16:14:06) [GCC 4.8.5 20150623 (Red Hat 4.8.5-44)]
Using /etc/ansible/ansible.cfg as config file
[WARNING]: running playbook inside collection fedora.linux_system_roles
statically imported: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml
statically imported: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_test_data.yml
statically imported: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/check_candlepin.yml
statically imported: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_squid.yml
statically imported: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_squid.yml
Skipping callback 'actionable', as we already have a stdout callback.
Skipping callback 'counter_enabled', as we already have a stdout callback.
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'full_skip', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'jsonl', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'null', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
Skipping callback 'selective', as we already have a stdout callback.
Skipping callback 'skippy', as we already have a stdout callback.
Skipping callback 'stderr', as we already have a stdout callback.
Skipping callback 'unixy', as we already have a stdout callback.
Skipping callback 'yaml', as we already have a stdout callback.
PLAYBOOK: tests_proxy.yml ******************************************************
1 plays in /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_proxy.yml

PLAY [Basic proxy test] ********************************************************
META: ran handlers

TASK [Get LSR_RHC_TEST_DATA environment variable] ******************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_test_data.yml:3
Saturday 08 November 2025 14:20:45 -0500 (0:00:00.032) 0:00:00.032 *****
ok: [managed-node1] => { "ansible_facts": { "lsr_rhc_test_data_file": "" }, "changed": false }

TASK [Import test data] ********************************************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_test_data.yml:12
Saturday 08 November 2025 14:20:45 -0500 (0:00:00.040) 0:00:00.073 *****
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Get facts for external test data] ****************************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_test_data.yml:16
Saturday 08 November 2025 14:20:45 -0500 (0:00:00.032) 0:00:00.105 *****
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Set local lsr_rhc_test_data] *********************************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_test_data.yml:24
Saturday 08 November 2025 14:20:45 -0500 (0:00:00.031) 0:00:00.137 *****
ok: [managed-node1] => { "ansible_facts": { "lsr_rhc_test_data": { "baseurl": "http://localhost:8080", "candlepin_host": "candlepin.local", "candlepin_insecure": false, "candlepin_port": 8443, "candlepin_prefix": "/candlepin", "env_nonworking": "Ceci n'est pas une environment", "envs_register": [ "Environment 2" ], "insights": false, "proxy_auth_hostname": "localhost", "proxy_auth_password": "proxypass", "proxy_auth_port": 3130, "proxy_auth_scheme": "https", "proxy_auth_username": "proxyuser", "proxy_noauth_hostname": "localhost", "proxy_noauth_port": 3128, "proxy_noauth_scheme": "https", "proxy_nonworking_hostname": "wrongproxy", "proxy_nonworking_password": "wrong-proxypassword", "proxy_nonworking_port": 4000, "proxy_nonworking_username": "wrong-proxyuser", "reg_activation_keys": [ "default_key" ], "reg_invalid_password": "invalid-password", "reg_invalid_username": "invalid-user", "reg_organization": "donaldduck", "reg_password": "password", "reg_username": "doc", "release": null, "repositories": [ { "name": "donaldy-content-label-7051", "state": "enabled" }, { "name": "content-label-32060", "state": "disabled" } ] } }, "ansible_included_var_files": [ "/tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/../files/candlepin_data.yml" ], "changed": false }

TASK [Check if system is ostree] ***********************************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_test_data.yml:32
Saturday 08 November 2025 14:20:45 -0500 (0:00:00.039) 0:00:00.176 *****
ok: [managed-node1] => { "ansible_facts": { "discovered_interpreter_python": "/usr/bin/python" }, "changed": false, "stat": { "exists": false } }
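The lsr_rhc_test_data fact loaded above drives the rest of the proxy test: it defines an authenticated squid endpoint (port 3130), an unauthenticated one (port 3128), and a deliberately nonworking one (port 4000). As a rough sketch of how a play could feed these values into the rhc role's proxy settings (the rhc_proxy key names below are inferred from the test data, not taken from the role's documentation; verify against the role's README):

- name: Register through the authenticated proxy (sketch only)
  include_role:
    name: fedora.linux_system_roles.rhc
  vars:
    rhc_proxy:  # assumed parameter shape
      hostname: "{{ lsr_rhc_test_data.proxy_auth_hostname }}"
      port: "{{ lsr_rhc_test_data.proxy_auth_port }}"
      username: "{{ lsr_rhc_test_data.proxy_auth_username }}"
      password: "{{ lsr_rhc_test_data.proxy_auth_password }}"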
TASK [Set flag to indicate system is ostree] ***********************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_test_data.yml:37
Saturday 08 November 2025 14:20:46 -0500 (0:00:00.474) 0:00:00.651 *****
ok: [managed-node1] => { "ansible_facts": { "__rhc_is_ostree": false }, "changed": false }

TASK [Get facts for external test data] ****************************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:9
Saturday 08 November 2025 14:20:46 -0500 (0:00:00.040) 0:00:00.691 *****
ok: [managed-node1]

TASK [Set helper fact for Candlepin base URL] **********************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:17
Saturday 08 November 2025 14:20:46 -0500 (0:00:00.603) 0:00:01.295 *****
ok: [managed-node1] => { "ansible_facts": { "_cp_url": "https://candlepin.local:8443/candlepin" }, "changed": false }

TASK [Set helper fact for Candlepin owner URL] *********************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:21
Saturday 08 November 2025 14:20:46 -0500 (0:00:00.050) 0:00:01.346 *****
ok: [managed-node1] => { "ansible_facts": { "_cp_url_owner": "https://candlepin.local:8443/candlepin/owners/donaldduck" }, "changed": false }

TASK [Add candlepin hostname to /etc/hosts] ************************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:25
Saturday 08 November 2025 14:20:46 -0500 (0:00:00.039) 0:00:01.385 *****
changed: [managed-node1] => { "backup": "", "changed": true }
MSG: line added

TASK [Install needed packages] *************************************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:31
Saturday 08 November 2025 14:20:47 -0500 (0:00:00.388) 0:00:01.774 *****
changed: [managed-node1] => { "changed": true, "changes": { "installed": [ "podman" ] }, "rc": 0, "results": [ "Loaded plugins: fastestmirror, product-id, search-disabled-repos, subscription-\n : manager\n\nThis system is not registered with an entitlement server. 
You can use subscription-manager to register.\n\nLoading mirror speeds from cached hostfile\nResolving Dependencies\n--> Running transaction check\n---> Package podman.x86_64 0:1.6.4-36.el7_9 will be installed\n--> Processing Dependency: slirp4netns >= 0.4.0-1 for package: podman-1.6.4-36.el7_9.x86_64\n--> Processing Dependency: runc >= 1.0.0-57 for package: podman-1.6.4-36.el7_9.x86_64\n--> Processing Dependency: containers-common >= 0.1.29-3 for package: podman-1.6.4-36.el7_9.x86_64\n--> Processing Dependency: containernetworking-plugins >= 0.8.1-1 for package: podman-1.6.4-36.el7_9.x86_64\n--> Processing Dependency: nftables for package: podman-1.6.4-36.el7_9.x86_64\n--> Processing Dependency: fuse-overlayfs for package: podman-1.6.4-36.el7_9.x86_64\n--> Processing Dependency: container-selinux for package: podman-1.6.4-36.el7_9.x86_64\n--> Processing Dependency: conmon for package: podman-1.6.4-36.el7_9.x86_64\n--> Running transaction check\n---> Package conmon.x86_64 2:2.0.8-1.el7 will be installed\n---> Package container-selinux.noarch 2:2.119.2-1.911c772.el7_8 will be installed\n---> Package containernetworking-plugins.x86_64 0:0.8.3-3.el7.centos will be installed\n---> Package containers-common.x86_64 1:0.1.40-11.el7_8 will be installed\n---> Package fuse-overlayfs.x86_64 0:0.7.2-6.el7_8 will be installed\n--> Processing Dependency: libfuse3.so.3(FUSE_3.2)(64bit) for package: fuse-overlayfs-0.7.2-6.el7_8.x86_64\n--> Processing Dependency: libfuse3.so.3(FUSE_3.0)(64bit) for package: fuse-overlayfs-0.7.2-6.el7_8.x86_64\n--> Processing Dependency: libfuse3.so.3()(64bit) for package: fuse-overlayfs-0.7.2-6.el7_8.x86_64\n---> Package nftables.x86_64 1:0.8-14.el7 will be installed\n--> Processing Dependency: libnftnl.so.7(LIBNFTNL_5)(64bit) for package: 1:nftables-0.8-14.el7.x86_64\n--> Processing Dependency: libnftnl.so.7()(64bit) for package: 1:nftables-0.8-14.el7.x86_64\n---> Package runc.x86_64 0:1.0.0-70.rc10.el7_9 will be installed\n--> Processing Dependency: criu for package: runc-1.0.0-70.rc10.el7_9.x86_64\n---> Package slirp4netns.x86_64 0:0.4.3-4.el7_8 will be installed\n--> Running transaction check\n---> Package criu.x86_64 0:3.12-2.el7 will be installed\n--> Processing Dependency: libprotobuf-c.so.1(LIBPROTOBUF_C_1.0.0)(64bit) for package: criu-3.12-2.el7.x86_64\n--> Processing Dependency: libprotobuf-c.so.1()(64bit) for package: criu-3.12-2.el7.x86_64\n--> Processing Dependency: libnet.so.1()(64bit) for package: criu-3.12-2.el7.x86_64\n---> Package fuse3-libs.x86_64 0:3.6.1-4.el7 will be installed\n---> Package libnftnl.x86_64 0:1.0.8-3.el7 will be installed\n--> Running transaction check\n---> Package libnet.x86_64 0:1.1.6-7.el7 will be installed\n---> Package protobuf-c.x86_64 0:1.0.2-3.el7 will be installed\n--> Finished Dependency Resolution\n\nDependencies Resolved\n\n================================================================================\n Package Arch Version Repository\n Size\n================================================================================\nInstalling:\n podman x86_64 1.6.4-36.el7_9 extras 13 M\nInstalling for dependencies:\n conmon x86_64 2:2.0.8-1.el7 extras 31 k\n container-selinux noarch 2:2.119.2-1.911c772.el7_8 extras 40 k\n containernetworking-plugins x86_64 0.8.3-3.el7.centos extras 20 M\n containers-common x86_64 1:0.1.40-11.el7_8 extras 43 k\n criu x86_64 3.12-2.el7 base 453 k\n fuse-overlayfs x86_64 0.7.2-6.el7_8 extras 54 k\n fuse3-libs x86_64 3.6.1-4.el7 extras 82 k\n libnet x86_64 1.1.6-7.el7 base 59 k\n libnftnl x86_64 
1.0.8-3.el7 base 78 k\n nftables x86_64 1:0.8-14.el7 base 186 k\n protobuf-c x86_64 1.0.2-3.el7 base 28 k\n runc x86_64 1.0.0-70.rc10.el7_9 extras 2.7 M\n slirp4netns x86_64 0.4.3-4.el7_8 extras 81 k\n\nTransaction Summary\n================================================================================\nInstall 1 Package (+13 Dependent packages)\n\nTotal download size: 37 M\nInstalled size: 143 M\nDownloading packages:\n--------------------------------------------------------------------------------\nTotal 31 MB/s | 37 MB 00:01 \nRunning transaction check\nRunning transaction test\nTransaction test succeeded\nRunning transaction\n Installing : 2:container-selinux-2.119.2-1.911c772.el7_8.noarch 1/14 \n Installing : slirp4netns-0.4.3-4.el7_8.x86_64 2/14 \n Installing : containernetworking-plugins-0.8.3-3.el7.centos.x86_64 3/14 \n Installing : libnftnl-1.0.8-3.el7.x86_64 4/14 \n Installing : 1:nftables-0.8-14.el7.x86_64 5/14 \n Installing : 2:conmon-2.0.8-1.el7.x86_64 6/14 \n Installing : fuse3-libs-3.6.1-4.el7.x86_64 7/14 \n Installing : fuse-overlayfs-0.7.2-6.el7_8.x86_64 8/14 \n Installing : 1:containers-common-0.1.40-11.el7_8.x86_64 9/14 \n Installing : protobuf-c-1.0.2-3.el7.x86_64 10/14 \n Installing : libnet-1.1.6-7.el7.x86_64 11/14 \n Installing : criu-3.12-2.el7.x86_64 12/14 \n Installing : runc-1.0.0-70.rc10.el7_9.x86_64 13/14 \n Installing : podman-1.6.4-36.el7_9.x86_64 14/14 \n Verifying : libnet-1.1.6-7.el7.x86_64 1/14 \n Verifying : protobuf-c-1.0.2-3.el7.x86_64 2/14 \n Verifying : fuse3-libs-3.6.1-4.el7.x86_64 3/14 \n Verifying : podman-1.6.4-36.el7_9.x86_64 4/14 \n Verifying : fuse-overlayfs-0.7.2-6.el7_8.x86_64 5/14 \n Verifying : runc-1.0.0-70.rc10.el7_9.x86_64 6/14 \n Verifying : slirp4netns-0.4.3-4.el7_8.x86_64 7/14 \n Verifying : 1:nftables-0.8-14.el7.x86_64 8/14 \n Verifying : criu-3.12-2.el7.x86_64 9/14 \n Verifying : 2:conmon-2.0.8-1.el7.x86_64 10/14 \n Verifying : 1:containers-common-0.1.40-11.el7_8.x86_64 11/14 \n Verifying : libnftnl-1.0.8-3.el7.x86_64 12/14 \n Verifying : containernetworking-plugins-0.8.3-3.el7.centos.x86_64 13/14 \n Verifying : 2:container-selinux-2.119.2-1.911c772.el7_8.noarch 14/14 \n\nInstalled:\n podman.x86_64 0:1.6.4-36.el7_9 \n\nDependency Installed:\n conmon.x86_64 2:2.0.8-1.el7 \n container-selinux.noarch 2:2.119.2-1.911c772.el7_8 \n containernetworking-plugins.x86_64 0:0.8.3-3.el7.centos \n containers-common.x86_64 1:0.1.40-11.el7_8 \n criu.x86_64 0:3.12-2.el7 \n fuse-overlayfs.x86_64 0:0.7.2-6.el7_8 \n fuse3-libs.x86_64 0:3.6.1-4.el7 \n libnet.x86_64 0:1.1.6-7.el7 \n libnftnl.x86_64 0:1.0.8-3.el7 \n nftables.x86_64 1:0.8-14.el7 \n protobuf-c.x86_64 0:1.0.2-3.el7 \n runc.x86_64 0:1.0.0-70.rc10.el7_9 \n slirp4netns.x86_64 0:0.4.3-4.el7_8 \n\nComplete!\n" ] }
lsrpackages: podman

TASK [Clean up Candlepin container] ********************************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:38
Saturday 08 November 2025 14:21:06 -0500 (0:00:19.195) 0:00:20.969 *****
included: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/teardown_candlepin.yml for managed-node1
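The teardown include above is defensive: the tasks that follow list any existing candlepin container and remove it only when the filter output contains a match. A sketch of the pattern; the removal command is an assumption, since the skipped task's action is not visible in the log:

- name: Check if the candlepin container exists
  command: podman ps -a --filter name=candlepin
  register: __candlepin_containers
  changed_when: false

- name: Ensure that Candlepin container doesn't exist
  command: podman rm -f candlepin  # assumed; actual task body not shown
  when: "'candlepin' in __candlepin_containers.stdout"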
"name=candlepin" ], "delta": "0:00:01.889761", "end": "2025-11-08 14:21:08.835436", "rc": 0, "start": "2025-11-08 14:21:06.945675" } STDOUT: CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES TASK [Ensure that Candlepin container doesn't exist] *************************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/teardown_candlepin.yml:17 Saturday 08 November 2025 14:21:08 -0500 (0:00:02.322) 0:00:23.339 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Start Candlepin container] *********************************************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:41 Saturday 08 November 2025 14:21:08 -0500 (0:00:00.037) 0:00:23.376 ***** ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "run", "--rm", "--detach", "--hostname", "candlepin.local", "--name", "candlepin", "--publish", "8443:8443", "--publish", "8080:8080", "--privileged", "ghcr.io/candlepin/candlepin-unofficial" ], "delta": "0:00:26.060011", "end": "2025-11-08 14:21:35.239965", "rc": 0, "start": "2025-11-08 14:21:09.179954" } STDOUT: a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc STDERR: Trying to pull ghcr.io/candlepin/candlepin-unofficial... Getting image source signatures Copying blob sha256:868e32b0b96932a9d44af4fddb5291921afffb37e16e8c9bc0382ef20f02e4a0 Copying config sha256:fc49ff13f7f3d9b39189a4dadc708bc5cf2aea44997b748d698128d169c494b8 Writing manifest to image destination Storing signatures TASK [Ensure directories exist] ************************************************ task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:62 Saturday 08 November 2025 14:21:35 -0500 (0:00:26.388) 0:00:49.765 ***** ok: [managed-node1] => (item=/etc/pki/product) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "/etc/pki/product", "mode": "0755", "owner": "root", "path": "/etc/pki/product", "secontext": "unconfined_u:object_r:cert_t:s0", "size": 4096, "state": "directory", "uid": 0 } ok: [managed-node1] => (item=/etc/pki/product-default) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "/etc/pki/product-default", "mode": "0755", "owner": "root", "path": "/etc/pki/product-default", "secontext": "unconfined_u:object_r:cert_t:s0", "size": 4096, "state": "directory", "uid": 0 } ok: [managed-node1] => (item=/etc/rhsm/ca) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "/etc/rhsm/ca", "mode": "0755", "owner": "root", "path": "/etc/rhsm/ca", "secontext": "system_u:object_r:rhsmcertd_config_t:s0", "size": 4096, "state": "directory", "uid": 0 } TASK [Copy product certificates] *********************************************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:72 Saturday 08 November 2025 14:21:37 -0500 (0:00:02.032) 0:00:51.797 ***** ok: [managed-node1] => (item=7050) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "podman", "cp", "candlepin:/home/candlepin/devel/candlepin/generated_certs/7050.pem", "/etc/pki/product-default/" ], "delta": "0:00:00.499379", "end": "2025-11-08 14:21:38.579471", "item": "7050", "rc": 0, "start": "2025-11-08 14:21:38.080092" } TASK [Copy Candlepin CA certificate for subscription-manager] ****************** task path: 
TASK [Copy Candlepin CA certificate for subscription-manager] ******************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:83
Saturday 08 November 2025 14:21:38 -0500 (0:00:01.393) 0:00:53.191 *****
ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "cp", "candlepin:/etc/candlepin/certs/candlepin-ca.crt", "/etc/rhsm/ca/candlepin-ca.pem" ], "delta": "0:00:00.446365", "end": "2025-11-08 14:21:39.859503", "rc": 0, "start": "2025-11-08 14:21:39.413138" }

TASK [Copy Candlepin CA certificate for system] ********************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:92
Saturday 08 November 2025 14:21:40 -0500 (0:00:01.272) 0:00:54.464 *****
ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "cp", "candlepin:/etc/candlepin/certs/candlepin-ca.crt", "/etc/pki/ca-trust/source/anchors/candlepin-ca.pem" ], "delta": "0:00:00.511370", "end": "2025-11-08 14:21:41.300502", "rc": 0, "start": "2025-11-08 14:21:40.789132" }

TASK [Update system certificates store] ****************************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:101
Saturday 08 November 2025 14:21:41 -0500 (0:00:01.462) 0:00:55.927 *****
ok: [managed-node1] => { "changed": false, "cmd": [ "update-ca-trust", "extract" ], "delta": "0:00:02.421261", "end": "2025-11-08 14:21:44.747740", "rc": 0, "start": "2025-11-08 14:21:42.326479" }

TASK [Wait for started Candlepin] **********************************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:108
Saturday 08 November 2025 14:21:44 -0500 (0:00:03.409) 0:00:59.337 *****
ok: [managed-node1] => { "attempts": 1, "changed": false, "connection": "close", "content_type": "application/json", "cookies": {}, "cookies_string": "", "date": "Sat, 08 Nov 2025 19:22:01 GMT", "elapsed": 16, "redirected": true, "status": 200, "transfer_encoding": "chunked", "url": "https://candlepin.local:8443/candlepin/", "vary": "accept-encoding", "x_candlepin_request_uuid": "8cdd44a5-a798-458b-ae97-205b7c301829", "x_version": "4.7.1-1" }
MSG: OK (unknown bytes)

TASK [Install GPG key for RPM repositories] ************************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:118
Saturday 08 November 2025 14:22:01 -0500 (0:00:17.091) 0:01:16.428 *****
changed: [managed-node1] => { "changed": true, "checksum_dest": null, "checksum_src": "5bd09883847285c54e6064f29dd9686c1afa5d72", "dest": "/etc/pki/rpm-gpg/RPM-GPG-KEY-candlepin", "elapsed": 0, "gid": 0, "group": "root", "md5sum": "aadf73f83655a28e287fab4099f1e17a", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:cert_t:s0", "size": 1660, "src": "/root/.ansible/tmp/ansible-tmp-1762629722.04-12600-141126453331434/tmpkwg6Vl", "state": "file", "status_code": 200, "uid": 0, "url": "http://candlepin.local:8080/RPM-GPG-KEY-candlepin" }
MSG: OK (1660 bytes)
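Note that the CA certificate lands in two places above: /etc/rhsm/ca for subscription-manager, and the system-wide anchors directory followed by update-ca-trust extract, so that later HTTPS checks can validate Candlepin's TLS certificate. Reconstructed from the logged commands; the task wrappers are assumptions:

- name: Copy Candlepin CA certificate for system
  command: podman cp candlepin:/etc/candlepin/certs/candlepin-ca.crt /etc/pki/ca-trust/source/anchors/candlepin-ca.pem

- name: Update system certificates store
  command: update-ca-trust extract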
"name": "Environment 1" }, "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item={u'id': u'envId2', u'name': u'Environment 2', u'desc': u'The environment 2'}) => { "ansible_loop_var": "item", "changed": false, "item": { "desc": "The environment 2", "id": "envId2", "name": "Environment 2" }, "skip_reason": "Conditional result was False" } TASK [Check Candlepin works] *************************************************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/check_candlepin.yml:3 Saturday 08 November 2025 14:22:02 -0500 (0:00:00.051) 0:01:17.035 ***** ok: [managed-node1] => { "changed": false, "connection": "close", "content_type": "application/json", "cookies": {}, "cookies_string": "", "date": "Sat, 08 Nov 2025 19:22:02 GMT", "elapsed": 0, "redirected": true, "status": 200, "transfer_encoding": "chunked", "url": "https://candlepin.local:8443/candlepin/", "vary": "accept-encoding", "x_candlepin_request_uuid": "acc271a5-0666-41d3-ad5c-11324ea34d97", "x_version": "4.7.1-1" } MSG: OK (unknown bytes) TASK [Install packages for squid] ********************************************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_squid.yml:7 Saturday 08 November 2025 14:22:03 -0500 (0:00:00.458) 0:01:17.493 ***** changed: [managed-node1] => { "changed": true, "changes": { "installed": [ "squid", "httpd-tools" ] }, "rc": 0, "results": [ "Loaded plugins: fastestmirror, product-id, search-disabled-repos, subscription-\n : manager\n\nThis system is not registered with an entitlement server. You can use subscription-manager to register.\n\nLoading mirror speeds from cached hostfile\nResolving Dependencies\n--> Running transaction check\n---> Package httpd-tools.x86_64 0:2.4.6-99.el7.centos.1 will be installed\n--> Processing Dependency: libaprutil-1.so.0()(64bit) for package: httpd-tools-2.4.6-99.el7.centos.1.x86_64\n--> Processing Dependency: libapr-1.so.0()(64bit) for package: httpd-tools-2.4.6-99.el7.centos.1.x86_64\n---> Package squid.x86_64 7:3.5.20-17.el7_9.10 will be installed\n--> Processing Dependency: squid-migration-script for package: 7:squid-3.5.20-17.el7_9.10.x86_64\n--> Processing Dependency: perl(DBI) for package: 7:squid-3.5.20-17.el7_9.10.x86_64\n--> Processing Dependency: libltdl.so.7()(64bit) for package: 7:squid-3.5.20-17.el7_9.10.x86_64\n--> Processing Dependency: libecap.so.3()(64bit) for package: 7:squid-3.5.20-17.el7_9.10.x86_64\n--> Running transaction check\n---> Package apr.x86_64 0:1.4.8-7.el7 will be installed\n---> Package apr-util.x86_64 0:1.5.2-6.el7_9.1 will be installed\n---> Package libecap.x86_64 0:1.0.0-1.el7 will be installed\n---> Package libtool-ltdl.x86_64 0:2.4.2-22.el7_3 will be installed\n---> Package perl-DBI.x86_64 0:1.627-4.el7 will be installed\n--> Processing Dependency: perl(RPC::PlServer) >= 0.2001 for package: perl-DBI-1.627-4.el7.x86_64\n--> Processing Dependency: perl(RPC::PlClient) >= 0.2000 for package: perl-DBI-1.627-4.el7.x86_64\n---> Package squid-migration-script.x86_64 7:3.5.20-17.el7_9.10 will be installed\n--> Running transaction check\n---> Package perl-PlRPC.noarch 0:0.2020-14.el7 will be installed\n--> Processing Dependency: perl(Net::Daemon) >= 0.13 for package: perl-PlRPC-0.2020-14.el7.noarch\n--> Processing Dependency: perl(Net::Daemon::Test) for package: perl-PlRPC-0.2020-14.el7.noarch\n--> Processing Dependency: perl(Net::Daemon::Log) for package: perl-PlRPC-0.2020-14.el7.noarch\n--> Running 
TASK [Install packages for squid] **********************************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_squid.yml:7
Saturday 08 November 2025 14:22:03 -0500 (0:00:00.458) 0:01:17.493 *****
changed: [managed-node1] => { "changed": true, "changes": { "installed": [ "squid", "httpd-tools" ] }, "rc": 0, "results": [ "Loaded plugins: fastestmirror, product-id, search-disabled-repos, subscription-\n : manager\n\nThis system is not registered with an entitlement server. You can use subscription-manager to register.\n\nLoading mirror speeds from cached hostfile\nResolving Dependencies\n--> Running transaction check\n---> Package httpd-tools.x86_64 0:2.4.6-99.el7.centos.1 will be installed\n--> Processing Dependency: libaprutil-1.so.0()(64bit) for package: httpd-tools-2.4.6-99.el7.centos.1.x86_64\n--> Processing Dependency: libapr-1.so.0()(64bit) for package: httpd-tools-2.4.6-99.el7.centos.1.x86_64\n---> Package squid.x86_64 7:3.5.20-17.el7_9.10 will be installed\n--> Processing Dependency: squid-migration-script for package: 7:squid-3.5.20-17.el7_9.10.x86_64\n--> Processing Dependency: perl(DBI) for package: 7:squid-3.5.20-17.el7_9.10.x86_64\n--> Processing Dependency: libltdl.so.7()(64bit) for package: 7:squid-3.5.20-17.el7_9.10.x86_64\n--> Processing Dependency: libecap.so.3()(64bit) for package: 7:squid-3.5.20-17.el7_9.10.x86_64\n--> Running transaction check\n---> Package apr.x86_64 0:1.4.8-7.el7 will be installed\n---> Package apr-util.x86_64 0:1.5.2-6.el7_9.1 will be installed\n---> Package libecap.x86_64 0:1.0.0-1.el7 will be installed\n---> Package libtool-ltdl.x86_64 0:2.4.2-22.el7_3 will be installed\n---> Package perl-DBI.x86_64 0:1.627-4.el7 will be installed\n--> Processing Dependency: perl(RPC::PlServer) >= 0.2001 for package: perl-DBI-1.627-4.el7.x86_64\n--> Processing Dependency: perl(RPC::PlClient) >= 0.2000 for package: perl-DBI-1.627-4.el7.x86_64\n---> Package squid-migration-script.x86_64 7:3.5.20-17.el7_9.10 will be installed\n--> Running transaction check\n---> Package perl-PlRPC.noarch 0:0.2020-14.el7 will be installed\n--> Processing Dependency: perl(Net::Daemon) >= 0.13 for package: perl-PlRPC-0.2020-14.el7.noarch\n--> Processing Dependency: perl(Net::Daemon::Test) for package: perl-PlRPC-0.2020-14.el7.noarch\n--> Processing Dependency: perl(Net::Daemon::Log) for package: perl-PlRPC-0.2020-14.el7.noarch\n--> Running transaction check\n---> Package perl-Net-Daemon.noarch 0:0.48-5.el7 will be installed\n--> Finished Dependency Resolution\n\nDependencies Resolved\n\n================================================================================\n Package Arch Version Repository Size\n================================================================================\nInstalling:\n httpd-tools x86_64 2.4.6-99.el7.centos.1 updates 94 k\n squid x86_64 7:3.5.20-17.el7_9.10 updates 3.1 M\nInstalling for dependencies:\n apr x86_64 1.4.8-7.el7 base 104 k\n apr-util x86_64 1.5.2-6.el7_9.1 updates 92 k\n libecap x86_64 1.0.0-1.el7 base 21 k\n libtool-ltdl x86_64 2.4.2-22.el7_3 base 49 k\n perl-DBI x86_64 1.627-4.el7 base 802 k\n perl-Net-Daemon noarch 0.48-5.el7 base 51 k\n perl-PlRPC noarch 0.2020-14.el7 base 36 k\n squid-migration-script x86_64 7:3.5.20-17.el7_9.10 updates 52 k\n\nTransaction Summary\n================================================================================\nInstall 2 Packages (+8 Dependent packages)\n\nTotal download size: 4.4 M\nInstalled size: 13 M\nDownloading packages:\n--------------------------------------------------------------------------------\nTotal 6.8 MB/s | 4.4 MB 00:00 \nRunning transaction check\nRunning transaction test\nTransaction test succeeded\nRunning transaction\n Installing : apr-1.4.8-7.el7.x86_64 1/10 \n Installing : apr-util-1.5.2-6.el7_9.1.x86_64 2/10 \n Installing : libtool-ltdl-2.4.2-22.el7_3.x86_64 3/10 \n Installing : libecap-1.0.0-1.el7.x86_64 4/10 \n Installing : 7:squid-migration-script-3.5.20-17.el7_9.10.x86_64 5/10 \n Installing : perl-Net-Daemon-0.48-5.el7.noarch 6/10 \n Installing : perl-PlRPC-0.2020-14.el7.noarch 7/10 \n Installing : perl-DBI-1.627-4.el7.x86_64 8/10 \n Installing : 7:squid-3.5.20-17.el7_9.10.x86_64 9/10 \n Installing : httpd-tools-2.4.6-99.el7.centos.1.x86_64 10/10 \n Verifying : perl-Net-Daemon-0.48-5.el7.noarch 1/10 \n Verifying : httpd-tools-2.4.6-99.el7.centos.1.x86_64 2/10 \n Verifying : 7:squid-migration-script-3.5.20-17.el7_9.10.x86_64 3/10 \n Verifying : apr-util-1.5.2-6.el7_9.1.x86_64 4/10 \n Verifying : apr-1.4.8-7.el7.x86_64 5/10 \n Verifying : libecap-1.0.0-1.el7.x86_64 6/10 \n Verifying : libtool-ltdl-2.4.2-22.el7_3.x86_64 7/10 \n Verifying : 7:squid-3.5.20-17.el7_9.10.x86_64 8/10 \n Verifying : perl-DBI-1.627-4.el7.x86_64 9/10 \n Verifying : perl-PlRPC-0.2020-14.el7.noarch 10/10 \n\nInstalled:\n httpd-tools.x86_64 0:2.4.6-99.el7.centos.1 squid.x86_64 7:3.5.20-17.el7_9.10 \n\nDependency Installed:\n apr.x86_64 0:1.4.8-7.el7 \n apr-util.x86_64 0:1.5.2-6.el7_9.1 \n libecap.x86_64 0:1.0.0-1.el7 \n libtool-ltdl.x86_64 0:2.4.2-22.el7_3 \n perl-DBI.x86_64 0:1.627-4.el7 \n perl-Net-Daemon.noarch 0:0.48-5.el7 \n perl-PlRPC.noarch 0:0.2020-14.el7 \n squid-migration-script.x86_64 7:3.5.20-17.el7_9.10 \n\nComplete!\n" ] }
lsrpackages: httpd-tools squid

TASK [Check the status of the backup of configuration] *************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_squid.yml:15
Saturday 08 November 2025 14:22:09 -0500 (0:00:06.038) 0:01:23.531 *****
ok: [managed-node1] => { "changed": false, "stat": { "exists": false } }
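The stat above guards the backup that follows: squid.conf is copied to squid.conf.BACKUP only when no backup exists yet, so re-runs keep the pristine copy from the first run. A sketch of the guard; the register name and the when condition are assumptions:

- name: Backup the configuration
  copy:
    src: /etc/squid/squid.conf
    dest: /etc/squid/squid.conf.BACKUP
    remote_src: true
  when: not __squid_backup.stat.exists  # assumed register from the stat task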
"70f953b530665efdfdd23b298cf8eddbacda74c0", "dest": "/etc/squid/squid.conf.BACKUP", "gid": 0, "group": "root", "md5sum": "3c8af1986cc652e972c35db57e808d36", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:squid_conf_t:s0", "size": 2244, "src": "/etc/squid/squid.conf", "state": "file", "uid": 0 } TASK [Copy the pristine configuration back] ************************************ task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_squid.yml:29 Saturday 08 November 2025 14:22:09 -0500 (0:00:00.503) 0:01:24.362 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Open the Candlepin port] ************************************************* task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_squid.yml:38 Saturday 08 November 2025 14:22:09 -0500 (0:00:00.037) 0:01:24.400 ***** changed: [managed-node1] => { "backup": "", "changed": true } MSG: line added TASK [Set the shutdown lifetime] *********************************************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_squid.yml:47 Saturday 08 November 2025 14:22:10 -0500 (0:00:00.305) 0:01:24.706 ***** changed: [managed-node1] => { "backup": "", "changed": true } MSG: line added TASK [Set the port] ************************************************************ task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_squid.yml:57 Saturday 08 November 2025 14:22:10 -0500 (0:00:00.310) 0:01:25.017 ***** ok: [managed-node1] => { "backup": "", "changed": false } TASK [Create the new passwd file] ********************************************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_squid.yml:66 Saturday 08 November 2025 14:22:10 -0500 (0:00:00.302) 0:01:25.319 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set the port] ************************************************************ task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_squid.yml:78 Saturday 08 November 2025 14:22:10 -0500 (0:00:00.037) 0:01:25.356 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Disable HTTP access allow] *********************************************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_squid.yml:84 Saturday 08 November 2025 14:22:10 -0500 (0:00:00.037) 0:01:25.394 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Insert initial auth config] ********************************************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_squid.yml:90 Saturday 08 November 2025 14:22:10 -0500 (0:00:00.036) 0:01:25.430 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Add authenticated acl] *************************************************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_squid.yml:103 Saturday 08 November 2025 14:22:11 -0500 (0:00:00.038) 0:01:25.469 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Allow authenticated acl] 
TASK [Allow authenticated acl] *************************************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_squid.yml:111
Saturday 08 November 2025 14:22:11 -0500 (0:00:00.038) 0:01:25.508 *****
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Restart squid] ***********************************************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_squid.yml:119
Saturday 08 November 2025 14:22:11 -0500 (0:00:00.039) 0:01:25.547 *****
changed: [managed-node1] => { "changed": true, "name": "squid", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "nss-lookup.target basic.target systemd-journald.socket syslog.target network.target system.slice", "AllowIsolate": "no", "AmbientCapabilities": "0", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "18446744073709551615", "CPUAccounting": "no", "CPUQuotaPerSecUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "18446744073709551615", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "18446744073709551615", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "Delegate": "no", "Description": "Squid caching proxy", "DevicePolicy": "auto", "EnvironmentFile": "/etc/sysconfig/squid (ignore_errors=no)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecReload": "{ path=/usr/sbin/squid ; argv[]=/usr/sbin/squid $SQUID_OPTS -k reconfigure -f $SQUID_CONF ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/squid ; argv[]=/usr/sbin/squid $SQUID_OPTS -f $SQUID_CONF ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPre": "{ path=/usr/libexec/squid/cache_swap.sh ; argv[]=/usr/libexec/squid/cache_swap.sh ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/sbin/squid ; argv[]=/usr/sbin/squid -k shutdown -f $SQUID_CONF ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/squid.service", "GuessMainPID": "yes", "IOScheduling": "0", "Id": "squid.service", "IgnoreOnIsolate": "no", "IgnoreOnSnapshot": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobTimeoutAction": "none", "JobTimeoutUSec": "0", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "18446744073709551615", "LimitCORE": "18446744073709551615", "LimitCPU": "18446744073709551615", "LimitDATA": "18446744073709551615", "LimitFSIZE": "18446744073709551615", "LimitLOCKS": "18446744073709551615", "LimitMEMLOCK": "65536", "LimitMSGQUEUE": "819200", "LimitNICE": "0", "LimitNOFILE": "16384", "LimitNPROC": "14311", "LimitRSS": "18446744073709551615", "LimitRTPRIO": "0", "LimitRTTIME": "18446744073709551615", "LimitSIGPENDING": "14311", "LimitSTACK": 
"18446744073709551615", "LoadState": "loaded", "MainPID": "0", "MemoryAccounting": "no", "MemoryCurrent": "18446744073709551615", "MemoryLimit": "18446744073709551615", "MountFlags": "0", "Names": "squid.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "PrivateDevices": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "ProtectHome": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "Requires": "basic.target system.slice", "Restart": "no", "RestartUSec": "100ms", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitInterval": "10000000", "StartupBlockIOWeight": "18446744073709551615", "StartupCPUShares": "18446744073709551615", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "no", "TasksCurrent": "18446744073709551615", "TasksMax": "18446744073709551615", "TimeoutStartUSec": "0", "TimeoutStopUSec": "0", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [Add SELinux policy for proxy ports] ************************************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_proxy.yml:26 Saturday 08 November 2025 14:22:12 -0500 (0:00:00.955) 0:01:26.502 ***** TASK [fedora.linux_system_roles.selinux : Set ansible_facts required by role and install packages] *** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:2 Saturday 08 November 2025 14:22:12 -0500 (0:00:00.143) 0:01:26.646 ***** included: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml for managed-node1 TASK [fedora.linux_system_roles.selinux : Ensure ansible_facts used by role] *** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:2 Saturday 08 November 2025 14:22:12 -0500 (0:00:00.020) 0:01:26.666 ***** ok: [managed-node1] TASK [fedora.linux_system_roles.selinux : Ensure SELinux packages] ************* task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:7 Saturday 08 November 2025 14:22:12 -0500 (0:00:00.370) 0:01:27.037 ***** included: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml for managed-node1 TASK [fedora.linux_system_roles.selinux : Check if system is ostree] *********** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:5 Saturday 08 November 2025 14:22:12 -0500 (0:00:00.034) 0:01:27.071 ***** ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] *** task path: 
TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] ***
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:10
Saturday 08 November 2025 14:22:12 -0500 (0:00:00.290) 0:01:27.362 *****
ok: [managed-node1] => { "ansible_facts": { "__selinux_is_ostree": false }, "changed": false }

TASK [fedora.linux_system_roles.selinux : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:17
Saturday 08 November 2025 14:22:12 -0500 (0:00:00.040) 0:01:27.402 *****
ok: [managed-node1] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.selinux : Set flag if transactional-update exists] ***
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:22
Saturday 08 November 2025 14:22:13 -0500 (0:00:00.285) 0:01:27.687 *****
ok: [managed-node1] => { "ansible_facts": { "__selinux_is_transactional": false }, "changed": false }

TASK [fedora.linux_system_roles.selinux : Install SELinux python2 tools] *******
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:26
Saturday 08 November 2025 14:22:13 -0500 (0:00:00.041) 0:01:27.729 *****
ok: [managed-node1] => { "changed": false, "rc": 0, "results": [ "libselinux-python-2.5-15.el7.x86_64 providing libselinux-python is already installed", "policycoreutils-python-2.5-34.el7.x86_64 providing policycoreutils-python is already installed" ] }
lsrpackages: libselinux-python policycoreutils-python

TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] *******
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35
Saturday 08 November 2025 14:22:14 -0500 (0:00:00.850) 0:01:28.580 *****
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] *******
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:46
Saturday 08 November 2025 14:22:14 -0500 (0:00:00.036) 0:01:28.616 *****
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Ensure grubby used to modify selinux kernel parameter] ***
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58
Saturday 08 November 2025 14:22:14 -0500 (0:00:00.035) 0:01:28.652 *****
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Install SELinux tool semanage] *******
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:67
Saturday 08 November 2025 14:22:14 -0500 (0:00:00.038) 0:01:28.690 *****
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:81
Saturday 08 November 2025 14:22:14 -0500 (0:00:00.035) 0:01:28.726 *****
skipping: [managed-node1] => {}
TASK [fedora.linux_system_roles.selinux : Reboot transactional update systems] ***
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:86
Saturday 08 November 2025 14:22:14 -0500 (0:00:00.035) 0:01:28.761 *****
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Fail if reboot is needed and not set] ***
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:91
Saturday 08 November 2025 14:22:14 -0500 (0:00:00.040) 0:01:28.802 *****
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Refresh facts] ***********************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:98
Saturday 08 November 2025 14:22:14 -0500 (0:00:00.039) 0:01:28.842 *****
ok: [managed-node1]

TASK [fedora.linux_system_roles.selinux : Run systemctl] ***********************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:8
Saturday 08 November 2025 14:22:14 -0500 (0:00:00.537) 0:01:29.379 *****
ok: [managed-node1] => { "changed": false, "cmd": [ "systemctl", "is-system-running" ], "delta": "0:00:00.006430", "end": "2025-11-08 14:22:15.170267", "failed_when_result": false, "rc": 0, "start": "2025-11-08 14:22:15.163837" }
STDOUT:
running

TASK [fedora.linux_system_roles.selinux : Require installed systemd] ***********
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:15
Saturday 08 November 2025 14:22:15 -0500 (0:00:00.287) 0:01:29.667 *****
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Set flag to indicate that systemd runtime operations are available] ***
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:20
Saturday 08 November 2025 14:22:15 -0500 (0:00:00.037) 0:01:29.705 *****
ok: [managed-node1] => { "ansible_facts": { "__selinux_is_booted": true }, "changed": false }

TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if enabled] ***
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:29
Saturday 08 November 2025 14:22:15 -0500 (0:00:00.040) 0:01:29.746 *****
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if disabled] ***
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:40
Saturday 08 November 2025 14:22:15 -0500 (0:00:00.037) 0:01:29.784 *****
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Set selinux_reboot_required] *********
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:52
Saturday 08 November 2025 14:22:15 -0500 (0:00:00.035) 0:01:29.819 *****
ok: [managed-node1] => { "ansible_facts": { "selinux_reboot_required": false }, "changed": false }
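The "Run systemctl" probe above shows failed_when_result: false, i.e. any exit status is tolerated and only the stdout matters for deciding whether systemd runtime operations are available. A sketch of that gate; the exact comparison is an assumption (states such as "degraded" may also be treated as booted):

- name: Run systemctl
  command: systemctl is-system-running
  register: __systemd_state
  failed_when: false
  changed_when: false

- name: Set flag to indicate that systemd runtime operations are available
  set_fact:
    __selinux_is_booted: "{{ __systemd_state.stdout == 'running' }}"  # assumed condition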
TASK [Add or remove selinux=0 from args as needed] *****************************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:56
Saturday 08 November 2025 14:22:15 -0500 (0:00:00.044) 0:01:29.863 *****
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Fail if reboot is required] **********
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:70
Saturday 08 November 2025 14:22:15 -0500 (0:00:00.039) 0:01:29.903 *****
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Warn if SELinux is disabled] *********
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:77
Saturday 08 November 2025 14:22:15 -0500 (0:00:00.012) 0:01:29.915 *****
skipping: [managed-node1] => {}

TASK [fedora.linux_system_roles.selinux : Drop all local modifications] ********
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:82
Saturday 08 November 2025 14:22:15 -0500 (0:00:00.033) 0:01:29.949 *****
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Purge all SELinux boolean local modifications] ***
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:90
Saturday 08 November 2025 14:22:15 -0500 (0:00:00.035) 0:01:29.985 *****
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Purge all SELinux file context local modifications] ***
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:95
Saturday 08 November 2025 14:22:15 -0500 (0:00:00.034) 0:01:30.019 *****
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Purge all SELinux port local modifications] ***
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:100
Saturday 08 November 2025 14:22:15 -0500 (0:00:00.034) 0:01:30.054 *****
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Purge all SELinux login local modifications] ***
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:105
Saturday 08 November 2025 14:22:15 -0500 (0:00:00.036) 0:01:30.090 *****
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Set SELinux booleans] ****************
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:110
Saturday 08 November 2025 14:22:15 -0500 (0:00:00.035) 0:01:30.125 *****

TASK [fedora.linux_system_roles.selinux : Set SELinux file contexts] ***********
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:121
Saturday 08 November 2025 14:22:15 -0500 (0:00:00.033) 0:01:30.159 *****
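The next task labels all three proxy ports (3128, 3130, 4000) with squid_port_t so squid may bind the non-default ones. A sketch of the equivalent seport call, with parameters taken from the logged loop item:

- name: Set an SELinux label on a port
  seport:
    ports: 3128,3130,4000
    proto: tcp
    setype: squid_port_t
    state: present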
TASK [fedora.linux_system_roles.selinux : Set an SELinux label on a port] ******
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:134
Saturday 08 November 2025 14:22:15 -0500 (0:00:00.034) 0:01:30.193 *****
changed: [managed-node1] => (item={u'setype': u'squid_port_t', u'proto': u'tcp', u'local': True, u'state': u'present', u'ports': [u'3128', u'3130', u'4000']}) => { "__selinux_item": { "local": true, "ports": [ "3128", "3130", "4000" ], "proto": "tcp", "setype": "squid_port_t", "state": "present" }, "ansible_loop_var": "__selinux_item", "changed": true, "ports": [ "3128", "3130", "4000" ], "proto": "tcp", "setype": "squid_port_t", "state": "present" }

TASK [fedora.linux_system_roles.selinux : Set linux user to SELinux user mapping] ***
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:146
Saturday 08 November 2025 14:22:22 -0500 (0:00:07.129) 0:01:37.323 *****

TASK [fedora.linux_system_roles.selinux : Get SELinux modules facts] ***********
task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:159
Saturday 08 November 2025 14:22:22 -0500 (0:00:00.035) 0:01:37.358 *****
ok: [managed-node1] => { "ansible_facts": { "selinux_checksums": false, "selinux_installed_modules": { "abrt": { "100": { "checksum": "", "enabled": 1 } }, "accountsd": { "100": { "checksum": "", "enabled": 1 } }, "acct": { "100": { "checksum": "", "enabled": 1 } }, "afs": { "100": { "checksum": "", "enabled": 1 } }, "aiccu": { "100": { "checksum": "", "enabled": 1 } }, "aide": { "100": { "checksum": "", "enabled": 1 } }, "ajaxterm": { "100": { "checksum": "", "enabled": 1 } }, "alsa": { "100": { "checksum": "", "enabled": 1 } }, "amanda": { "100": { "checksum": "", "enabled": 1 } }, "amtu": { "100": { "checksum": "", "enabled": 1 } }, "anaconda": { "100": { "checksum": "", "enabled": 1 } }, "antivirus": { "100": { "checksum": "", "enabled": 1 } }, "apache": { "100": { "checksum": "", "enabled": 1 } }, "apcupsd": { "100": { "checksum": "", "enabled": 1 } }, "apm": { "100": { "checksum": "", "enabled": 1 } }, "application": { "100": { "checksum": "", "enabled": 1 } }, "arpwatch": { "100": { "checksum": "", "enabled": 1 } }, "asterisk": { "100": { "checksum": "", "enabled": 1 } }, "auditadm": { "100": { "checksum": "", "enabled": 1 } }, "authconfig": { "100": { "checksum": "", "enabled": 1 } }, "authlogin": { "100": { "checksum": "", "enabled": 1 } }, "automount": { "100": { "checksum": "", "enabled": 1 } }, "avahi": { "100": { "checksum": "", "enabled": 1 } }, "awstats": { "100": { "checksum": "", "enabled": 1 } }, "bacula": { "100": { "checksum": "", "enabled": 1 } }, "base": { "100": { "checksum": "", "enabled": 1 } }, "bcfg2": { "100": { "checksum": "", "enabled": 1 } }, "bind": { "100": { "checksum": "", "enabled": 1 } }, "bitlbee": { "100": { "checksum": "", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "", "enabled": 1 } }, "blueman": { "100": { "checksum": "", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "", "enabled": 1 } }, "boinc": { "100": { "checksum": "", "enabled": 1 } }, "boltd": { "100": { "checksum": "", "enabled": 1 } }, "bootloader": { "100": { "checksum": "", "enabled": 1 } }, "brctl": { "100": { "checksum": "", "enabled": 1 } }, "brltty": { "100": { "checksum": "", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "", "enabled": 1 } }, "bumblebee": { "100": { "checksum": "", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "", "enabled": 1 } }, "calamaris": { "100": { "checksum": "", "enabled": 1 } }, "callweaver": { "100": { "checksum": "", "enabled": 1 } }, "canna": { "100": { "checksum": "", "enabled": 1 } }, "ccs": { "100": { "checksum": 
"", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "", "enabled": 1 } }, "certmaster": { "100": { "checksum": "", "enabled": 1 } }, "certmonger": { "100": { "checksum": "", "enabled": 1 } }, "certwatch": { "100": { "checksum": "", "enabled": 1 } }, "cfengine": { "100": { "checksum": "", "enabled": 1 } }, "cgdcbxd": { "100": { "checksum": "", "enabled": 1 } }, "cgroup": { "100": { "checksum": "", "enabled": 1 } }, "chrome": { "100": { "checksum": "", "enabled": 1 } }, "chronyd": { "100": { "checksum": "", "enabled": 1 } }, "cinder": { "100": { "checksum": "", "enabled": 1 } }, "cipe": { "100": { "checksum": "", "enabled": 1 } }, "clock": { "100": { "checksum": "", "enabled": 1 } }, "clogd": { "100": { "checksum": "", "enabled": 1 } }, "cloudform": { "100": { "checksum": "", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "", "enabled": 1 } }, "cobbler": { "100": { "checksum": "", "enabled": 1 } }, "cockpit": { "100": { "checksum": "", "enabled": 1 } }, "collectd": { "100": { "checksum": "", "enabled": 1 } }, "colord": { "100": { "checksum": "", "enabled": 1 } }, "comsat": { "100": { "checksum": "", "enabled": 1 } }, "condor": { "100": { "checksum": "", "enabled": 1 } }, "conman": { "100": { "checksum": "", "enabled": 1 } }, "consolekit": { "100": { "checksum": "", "enabled": 1 } }, "container": { "100": { "checksum": "", "enabled": 1 }, "200": { "checksum": "", "enabled": 1 } }, "couchdb": { "100": { "checksum": "", "enabled": 1 } }, "courier": { "100": { "checksum": "", "enabled": 1 } }, "cpucontrol": { "100": { "checksum": "", "enabled": 1 } }, "cpufreqselector": { "100": { "checksum": "", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "", "enabled": 1 } }, "cron": { "100": { "checksum": "", "enabled": 1 } }, "ctdb": { "100": { "checksum": "", "enabled": 1 } }, "cups": { "100": { "checksum": "", "enabled": 1 } }, "cvs": { "100": { "checksum": "", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "", "enabled": 1 } }, "cyrus": { "100": { "checksum": "", "enabled": 1 } }, "daemontools": { "100": { "checksum": "", "enabled": 1 } }, "dbadm": { "100": { "checksum": "", "enabled": 1 } }, "dbskk": { "100": { "checksum": "", "enabled": 1 } }, "dbus": { "100": { "checksum": "", "enabled": 1 } }, "dcc": { "100": { "checksum": "", "enabled": 1 } }, "ddclient": { "100": { "checksum": "", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "", "enabled": 1 } }, "devicekit": { "100": { "checksum": "", "enabled": 1 } }, "dhcp": { "100": { "checksum": "", "enabled": 1 } }, "dictd": { "100": { "checksum": "", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "", "enabled": 1 } }, "dirsrv-admin": { "100": { "checksum": "", "enabled": 1 } }, "dmesg": { "100": { "checksum": "", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "", "enabled": 1 } }, "dnssec": { "100": { "checksum": "", "enabled": 1 } }, "dovecot": { "100": { "checksum": "", "enabled": 1 } }, "drbd": { "100": { "checksum": "", "enabled": 1 } }, "dspam": { "100": { "checksum": "", "enabled": 1 } }, "entropyd": { "100": { "checksum": "", "enabled": 1 } }, "exim": { "100": { "checksum": "", "enabled": 1 } }, "fail2ban": { "100": { "checksum": "", "enabled": 1 } }, "fcoe": { "100": { "checksum": "", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "", "enabled": 1 } }, "finger": { "100": { "checksum": "", "enabled": 1 } }, "firewalld": { "100": { "checksum": "", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "", "enabled": 1 } }, "firstboot": { "100": { 
"checksum": "", "enabled": 1 } }, "fprintd": { "100": { "checksum": "", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "", "enabled": 1 } }, "freqset": { "100": { "checksum": "", "enabled": 1 } }, "fstools": { "100": { "checksum": "", "enabled": 1 } }, "ftp": { "100": { "checksum": "", "enabled": 1 } }, "games": { "100": { "checksum": "", "enabled": 1 } }, "ganesha": { "100": { "checksum": "", "enabled": 1 } }, "gdomap": { "100": { "checksum": "", "enabled": 1 } }, "geoclue": { "100": { "checksum": "", "enabled": 1 } }, "getty": { "100": { "checksum": "", "enabled": 1 } }, "git": { "100": { "checksum": "", "enabled": 1 } }, "gitosis": { "100": { "checksum": "", "enabled": 1 } }, "glance": { "100": { "checksum": "", "enabled": 1 } }, "glusterd": { "100": { "checksum": "", "enabled": 1 } }, "gnome": { "100": { "checksum": "", "enabled": 1 } }, "gpg": { "100": { "checksum": "", "enabled": 1 } }, "gpm": { "100": { "checksum": "", "enabled": 1 } }, "gpsd": { "100": { "checksum": "", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "", "enabled": 1 } }, "guest": { "100": { "checksum": "", "enabled": 1 } }, "hddtemp": { "100": { "checksum": "", "enabled": 1 } }, "hostname": { "100": { "checksum": "", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "", "enabled": 1 } }, "hwloc": { "100": { "checksum": "", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "", "enabled": 1 } }, "icecast": { "100": { "checksum": "", "enabled": 1 } }, "inetd": { "100": { "checksum": "", "enabled": 1 } }, "init": { "100": { "checksum": "", "enabled": 1 } }, "inn": { "100": { "checksum": "", "enabled": 1 } }, "iodine": { "100": { "checksum": "", "enabled": 1 } }, "iotop": { "100": { "checksum": "", "enabled": 1 } }, "ipa": { "100": { "checksum": "", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "", "enabled": 1 } }, "ipsec": { "100": { "checksum": "", "enabled": 1 } }, "iptables": { "100": { "checksum": "", "enabled": 1 } }, "irc": { "100": { "checksum": "", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "", "enabled": 1 } }, "iscsi": { "100": { "checksum": "", "enabled": 1 } }, "isns": { "100": { "checksum": "", "enabled": 1 } }, "jabber": { "100": { "checksum": "", "enabled": 1 } }, "jetty": { "100": { "checksum": "", "enabled": 1 } }, "jockey": { "100": { "checksum": "", "enabled": 1 } }, "journalctl": { "100": { "checksum": "", "enabled": 1 } }, "kdump": { "100": { "checksum": "", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "", "enabled": 1 } }, "keepalived": { "100": { "checksum": "", "enabled": 1 } }, "kerberos": { "100": { "checksum": "", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "", "enabled": 1 } }, "keystone": { "100": { "checksum": "", "enabled": 1 } }, "kismet": { "100": { "checksum": "", "enabled": 1 } }, "kmscon": { "100": { "checksum": "", "enabled": 1 } }, "kpatch": { "100": { "checksum": "", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "", "enabled": 1 } }, "ktalk": { "100": { "checksum": "", "enabled": 1 } }, "l2tp": { "100": { "checksum": "", "enabled": 1 } }, "ldap": { "100": { "checksum": "", "enabled": 1 } }, "libraries": { "100": { "checksum": "", "enabled": 1 } }, "likewise": { "100": { "checksum": "", "enabled": 1 } }, "linuxptp": { "100": { "checksum": "", "enabled": 1 } }, "lircd": { "100": { "checksum": "", "enabled": 1 } }, "livecd": { "100": { "checksum": "", "enabled": 1 } }, "lldpad": { "100": { "checksum": "", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "", "enabled": 1 } }, "locallogin": { "100": { "checksum": "", 
"enabled": 1 } }, "lockdev": { "100": { "checksum": "", "enabled": 1 } }, "logadm": { "100": { "checksum": "", "enabled": 1 } }, "logging": { "100": { "checksum": "", "enabled": 1 } }, "logrotate": { "100": { "checksum": "", "enabled": 1 } }, "logwatch": { "100": { "checksum": "", "enabled": 1 } }, "lpd": { "100": { "checksum": "", "enabled": 1 } }, "lsm": { "100": { "checksum": "", "enabled": 1 } }, "lttng-tools": { "100": { "checksum": "", "enabled": 1 } }, "lvm": { "100": { "checksum": "", "enabled": 1 } }, "mailman": { "100": { "checksum": "", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "", "enabled": 1 } }, "man2html": { "100": { "checksum": "", "enabled": 1 } }, "mandb": { "100": { "checksum": "", "enabled": 1 } }, "mcelog": { "100": { "checksum": "", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "", "enabled": 1 } }, "memcached": { "100": { "checksum": "", "enabled": 1 } }, "milter": { "100": { "checksum": "", "enabled": 1 } }, "minidlna": { "100": { "checksum": "", "enabled": 1 } }, "minissdpd": { "100": { "checksum": "", "enabled": 1 } }, "mip6d": { "100": { "checksum": "", "enabled": 1 } }, "mirrormanager": { "100": { "checksum": "", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "", "enabled": 1 } }, "mock": { "100": { "checksum": "", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "", "enabled": 1 } }, "modutils": { "100": { "checksum": "", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "", "enabled": 1 } }, "mongodb": { "100": { "checksum": "", "enabled": 1 } }, "motion": { "100": { "checksum": "", "enabled": 1 } }, "mount": { "100": { "checksum": "", "enabled": 1 } }, "mozilla": { "100": { "checksum": "", "enabled": 1 } }, "mpd": { "100": { "checksum": "", "enabled": 1 } }, "mplayer": { "100": { "checksum": "", "enabled": 1 } }, "mrtg": { "100": { "checksum": "", "enabled": 1 } }, "mta": { "100": { "checksum": "", "enabled": 1 } }, "munin": { "100": { "checksum": "", "enabled": 1 } }, "mysql": { "100": { "checksum": "", "enabled": 1 } }, "mythtv": { "100": { "checksum": "", "enabled": 1 } }, "nagios": { "100": { "checksum": "", "enabled": 1 } }, "namespace": { "100": { "checksum": "", "enabled": 1 } }, "ncftool": { "100": { "checksum": "", "enabled": 1 } }, "netlabel": { "100": { "checksum": "", "enabled": 1 } }, "netutils": { "100": { "checksum": "", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "", "enabled": 1 } }, "ninfod": { "100": { "checksum": "", "enabled": 1 } }, "nis": { "100": { "checksum": "", "enabled": 1 } }, "nova": { "100": { "checksum": "", "enabled": 1 } }, "nscd": { "100": { "checksum": "", "enabled": 1 } }, "nsd": { "100": { "checksum": "", "enabled": 1 } }, "nslcd": { "100": { "checksum": "", "enabled": 1 } }, "ntop": { "100": { "checksum": "", "enabled": 1 } }, "ntp": { "100": { "checksum": "", "enabled": 1 } }, "numad": { "100": { "checksum": "", "enabled": 1 } }, "nut": { "100": { "checksum": "", "enabled": 1 } }, "nx": { "100": { "checksum": "", "enabled": 1 } }, "obex": { "100": { "checksum": "", "enabled": 1 } }, "oddjob": { "100": { "checksum": "", "enabled": 1 } }, "openct": { "100": { "checksum": "", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "", "enabled": 1 } }, "openhpid": { "100": { "checksum": "", "enabled": 1 } }, "openshift": { "100": { "checksum": "", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "", "enabled": 1 } }, "opensm": { "100": { "checksum": "", "enabled": 1 } }, "openvpn": { "100": { 
"checksum": "", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "", "enabled": 1 } }, "openwsman": { "100": { "checksum": "", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "", "enabled": 1 } }, "osad": { "100": { "checksum": "", "enabled": 1 } }, "pads": { "100": { "checksum": "", "enabled": 1 } }, "passenger": { "100": { "checksum": "", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "", "enabled": 1 } }, "pcp": { "100": { "checksum": "", "enabled": 1 } }, "pcscd": { "100": { "checksum": "", "enabled": 1 } }, "pegasus": { "100": { "checksum": "", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "", "enabled": 1 } }, "pesign": { "100": { "checksum": "", "enabled": 1 } }, "pingd": { "100": { "checksum": "", "enabled": 1 } }, "piranha": { "100": { "checksum": "", "enabled": 1 } }, "pkcs": { "100": { "checksum": "", "enabled": 1 } }, "pki": { "100": { "checksum": "", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "", "enabled": 1 } }, "policykit": { "100": { "checksum": "", "enabled": 1 } }, "polipo": { "100": { "checksum": "", "enabled": 1 } }, "portmap": { "100": { "checksum": "", "enabled": 1 } }, "portreserve": { "100": { "checksum": "", "enabled": 1 } }, "postfix": { "100": { "checksum": "", "enabled": 1 } }, "postgresql": { "100": { "checksum": "", "enabled": 1 } }, "postgrey": { "100": { "checksum": "", "enabled": 1 } }, "ppp": { "100": { "checksum": "", "enabled": 1 } }, "prelink": { "100": { "checksum": "", "enabled": 1 } }, "prelude": { "100": { "checksum": "", "enabled": 1 } }, "privoxy": { "100": { "checksum": "", "enabled": 1 } }, "procmail": { "100": { "checksum": "", "enabled": 1 } }, "prosody": { "100": { "checksum": "", "enabled": 1 } }, "psad": { "100": { "checksum": "", "enabled": 1 } }, "ptchown": { "100": { "checksum": "", "enabled": 1 } }, "publicfile": { "100": { "checksum": "", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "", "enabled": 1 } }, "puppet": { "100": { "checksum": "", "enabled": 1 } }, "pwauth": { "100": { "checksum": "", "enabled": 1 } }, "qmail": { "100": { "checksum": "", "enabled": 1 } }, "qpid": { "100": { "checksum": "", "enabled": 1 } }, "quantum": { "100": { "checksum": "", "enabled": 1 } }, "quota": { "100": { "checksum": "", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "", "enabled": 1 } }, "radius": { "100": { "checksum": "", "enabled": 1 } }, "radvd": { "100": { "checksum": "", "enabled": 1 } }, "raid": { "100": { "checksum": "", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "", "enabled": 1 } }, "rdisc": { "100": { "checksum": "", "enabled": 1 } }, "readahead": { "100": { "checksum": "", "enabled": 1 } }, "realmd": { "100": { "checksum": "", "enabled": 1 } }, "redis": { "100": { "checksum": "", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "", "enabled": 1 } }, "restraint": { "400": { "checksum": "", "enabled": 1 } }, "rhcs": { "100": { "checksum": "", "enabled": 1 } }, "rhev": { "100": { "checksum": "", "enabled": 1 } }, "rhgb": { "100": { "checksum": "", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "", "enabled": 1 } }, "rhts": { "400": { "checksum": "", "enabled": 1 } }, "ricci": { "100": { "checksum": "", "enabled": 1 } }, "rkhunter": { "100": { "checksum": "", "enabled": 1 } }, "rlogin": { "100": { "checksum": "", "enabled": 1 } }, "rngd": { "100": { "checksum": "", "enabled": 1 } }, "roundup": { "100": { "checksum": "", "enabled": 1 } }, "rpc": { 
"100": { "checksum": "", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "", "enabled": 1 } }, "rpm": { "100": { "checksum": "", "enabled": 1 } }, "rshd": { "100": { "checksum": "", "enabled": 1 } }, "rssh": { "100": { "checksum": "", "enabled": 1 } }, "rsync": { "100": { "checksum": "", "enabled": 1 } }, "rtas": { "100": { "checksum": "", "enabled": 1 } }, "rtkit": { "100": { "checksum": "", "enabled": 1 } }, "rwho": { "100": { "checksum": "", "enabled": 1 } }, "samba": { "100": { "checksum": "", "enabled": 1 } }, "sambagui": { "100": { "checksum": "", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "", "enabled": 1 } }, "sanlock": { "100": { "checksum": "", "enabled": 1 } }, "sasl": { "100": { "checksum": "", "enabled": 1 } }, "sbd": { "100": { "checksum": "", "enabled": 1 } }, "sblim": { "100": { "checksum": "", "enabled": 1 } }, "screen": { "100": { "checksum": "", "enabled": 1 } }, "secadm": { "100": { "checksum": "", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "", "enabled": 1 } }, "sendmail": { "100": { "checksum": "", "enabled": 1 } }, "sensord": { "100": { "checksum": "", "enabled": 1 } }, "setrans": { "100": { "checksum": "", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "", "enabled": 1 } }, "seunshare": { "100": { "checksum": "", "enabled": 1 } }, "sge": { "100": { "checksum": "", "enabled": 1 } }, "shorewall": { "100": { "checksum": "", "enabled": 1 } }, "slocate": { "100": { "checksum": "", "enabled": 1 } }, "slpd": { "100": { "checksum": "", "enabled": 1 } }, "smartmon": { "100": { "checksum": "", "enabled": 1 } }, "smokeping": { "100": { "checksum": "", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "", "enabled": 1 } }, "smsd": { "100": { "checksum": "", "enabled": 1 } }, "snapper": { "100": { "checksum": "", "enabled": 1 } }, "snmp": { "100": { "checksum": "", "enabled": 1 } }, "snort": { "100": { "checksum": "", "enabled": 1 } }, "sosreport": { "100": { "checksum": "", "enabled": 1 } }, "soundserver": { "100": { "checksum": "", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "", "enabled": 1 } }, "squid": { "100": { "checksum": "", "enabled": 1 } }, "ssh": { "100": { "checksum": "", "enabled": 1 } }, "sssd": { "100": { "checksum": "", "enabled": 1 } }, "staff": { "100": { "checksum": "", "enabled": 1 } }, "stapserver": { "100": { "checksum": "", "enabled": 1 } }, "stunnel": { "100": { "checksum": "", "enabled": 1 } }, "su": { "100": { "checksum": "", "enabled": 1 } }, "sudo": { "100": { "checksum": "", "enabled": 1 } }, "svnserve": { "100": { "checksum": "", "enabled": 1 } }, "swift": { "100": { "checksum": "", "enabled": 1 } }, "sysadm": { "100": { "checksum": "", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "", "enabled": 1 } }, "sysstat": { "100": { "checksum": "", "enabled": 1 } }, "systemd": { "100": { "checksum": "", "enabled": 1 } }, "tangd": { "100": { "checksum": "", "enabled": 1 } }, "targetd": { "100": { "checksum": "", "enabled": 1 } }, "tcpd": { "100": { "checksum": "", "enabled": 1 } }, "tcsd": { "100": { "checksum": "", "enabled": 1 } }, "telepathy": { "100": { "checksum": "", "enabled": 1 } }, "telnet": { "100": { "checksum": "", "enabled": 1 } }, "tftp": { "100": { "checksum": "", "enabled": 1 } }, "tgtd": { "100": { "checksum": "", "enabled": 1 } }, "thin": { "100": { "checksum": "", "enabled": 1 } }, "thumb": 
{ "100": { "checksum": "", "enabled": 1 } }, "tlp": { "100": { "checksum": "", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "", "enabled": 1 } }, "tomcat": { "100": { "checksum": "", "enabled": 1 } }, "tor": { "100": { "checksum": "", "enabled": 1 } }, "tuned": { "100": { "checksum": "", "enabled": 1 } }, "tvtime": { "100": { "checksum": "", "enabled": 1 } }, "udev": { "100": { "checksum": "", "enabled": 1 } }, "ulogd": { "100": { "checksum": "", "enabled": 1 } }, "uml": { "100": { "checksum": "", "enabled": 1 } }, "unconfined": { "100": { "checksum": "", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "", "enabled": 1 } }, "updfstab": { "100": { "checksum": "", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "", "enabled": 1 } }, "userdomain": { "100": { "checksum": "", "enabled": 1 } }, "userhelper": { "100": { "checksum": "", "enabled": 1 } }, "usermanage": { "100": { "checksum": "", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "", "enabled": 1 } }, "uucp": { "100": { "checksum": "", "enabled": 1 } }, "uuidd": { "100": { "checksum": "", "enabled": 1 } }, "varnishd": { "100": { "checksum": "", "enabled": 1 } }, "vdagent": { "100": { "checksum": "", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "", "enabled": 1 } }, "virt": { "100": { "checksum": "", "enabled": 1 } }, "vlock": { "100": { "checksum": "", "enabled": 1 } }, "vmtools": { "100": { "checksum": "", "enabled": 1 } }, "vmware": { "100": { "checksum": "", "enabled": 1 } }, "vnstatd": { "100": { "checksum": "", "enabled": 1 } }, "vpn": { "100": { "checksum": "", "enabled": 1 } }, "w3c": { "100": { "checksum": "", "enabled": 1 } }, "watchdog": { "100": { "checksum": "", "enabled": 1 } }, "wdmd": { "100": { "checksum": "", "enabled": 1 } }, "webadm": { "100": { "checksum": "", "enabled": 1 } }, "webalizer": { "100": { "checksum": "", "enabled": 1 } }, "wine": { "100": { "checksum": "", "enabled": 1 } }, "wireshark": { "100": { "checksum": "", "enabled": 1 } }, "xen": { "100": { "checksum": "", "enabled": 1 } }, "xguest": { "100": { "checksum": "", "enabled": 1 } }, "xserver": { "100": { "checksum": "", "enabled": 1 } }, "zabbix": { "100": { "checksum": "", "enabled": 1 } }, "zarafa": { "100": { "checksum": "", "enabled": 1 } }, "zebra": { "100": { "checksum": "", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "", "enabled": 1 } }, "zosremote": { "100": { "checksum": "", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Load SELinux modules] **************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:162 Saturday 08 November 2025 14:22:25 -0500 (0:00:02.740) 0:01:40.098 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree] *** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:175 Saturday 08 November 2025 14:22:25 -0500 (0:00:00.060) 0:01:40.159 ***** TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree in check mode] *** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:183 Saturday 08 November 2025 14:22:25 -0500 
(0:00:00.056) 0:01:40.215 ***** TASK [Register (wrong host, wrong port)] *************************************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_proxy.yml:39 Saturday 08 November 2025 14:22:25 -0500 (0:00:00.056) 0:01:40.272 ***** TASK [fedora.linux_system_roles.rhc : Set ansible_facts required by role] ****** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:3 Saturday 08 November 2025 14:22:25 -0500 (0:00:00.107) 0:01:40.380 ***** included: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.rhc : Ensure ansible_facts used by role] ******* task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:3 Saturday 08 November 2025 14:22:26 -0500 (0:00:00.117) 0:01:40.497 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Check if system is ostree] *************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:11 Saturday 08 November 2025 14:22:26 -0500 (0:00:00.067) 0:01:40.565 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Set flag to indicate system is ostree] *** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:16 Saturday 08 November 2025 14:22:26 -0500 (0:00:00.058) 0:01:40.623 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Check if insights-packages are installed] *** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:20 Saturday 08 November 2025 14:22:26 -0500 (0:00:00.058) 0:01:40.681 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Handle insights unregistration] ********** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:6 Saturday 08 November 2025 14:22:26 -0500 (0:00:00.057) 0:01:40.740 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Handle system subscription] ************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:15 Saturday 08 November 2025 14:22:26 -0500 (0:00:00.058) 0:01:40.798 ***** included: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml for managed-node1 TASK [fedora.linux_system_roles.rhc : Ensure required packages are installed] *** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:3 Saturday 08 November 2025 14:22:26 -0500 (0:00:00.074) 0:01:40.873 ***** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [ "subscription-manager-1.24.54-1.el7.centos.x86_64 providing subscription-manager is already installed" ] } lsrpackages: subscription-manager TASK [fedora.linux_system_roles.rhc : Get subscription status] ***************** task path: 
/tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:10 Saturday 08 November 2025 14:22:27 -0500 (0:00:00.896) 0:01:41.769 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Call subscription-manager] *************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:23 Saturday 08 November 2025 14:22:27 -0500 (0:00:00.073) 0:01:41.843 ***** fatal: [managed-node1]: FAILED! => {} MSG: Unable to import community.general.redhat_subscription due to invalid syntax TASK [Assert registration failed] ********************************************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_proxy.yml:63 Saturday 08 November 2025 14:22:27 -0500 (0:00:00.196) 0:01:42.039 ***** ok: [managed-node1] => { "changed": false } MSG: All assertions passed TASK [Register (wrong host)] *************************************************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_proxy.yml:69 Saturday 08 November 2025 14:22:27 -0500 (0:00:00.069) 0:01:42.108 ***** TASK [fedora.linux_system_roles.rhc : Set ansible_facts required by role] ****** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:3 Saturday 08 November 2025 14:22:27 -0500 (0:00:00.102) 0:01:42.211 ***** included: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.rhc : Ensure ansible_facts used by role] ******* task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:3 Saturday 08 November 2025 14:22:27 -0500 (0:00:00.110) 0:01:42.322 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Check if system is ostree] *************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:11 Saturday 08 November 2025 14:22:27 -0500 (0:00:00.103) 0:01:42.426 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Set flag to indicate system is ostree] *** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:16 Saturday 08 November 2025 14:22:28 -0500 (0:00:00.088) 0:01:42.514 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Check if insights-packages are installed] *** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:20 Saturday 08 November 2025 14:22:28 -0500 (0:00:00.088) 0:01:42.603 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Handle insights unregistration] ********** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:6 Saturday 08 November 2025 14:22:28 -0500 (0:00:00.081) 0:01:42.684 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Handle system subscription] ************** task path: 
/tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:15 Saturday 08 November 2025 14:22:28 -0500 (0:00:00.071) 0:01:42.756 ***** included: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml for managed-node1 TASK [fedora.linux_system_roles.rhc : Ensure required packages are installed] *** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:3 Saturday 08 November 2025 14:22:28 -0500 (0:00:00.129) 0:01:42.885 ***** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [ "subscription-manager-1.24.54-1.el7.centos.x86_64 providing subscription-manager is already installed" ] } lsrpackages: subscription-manager TASK [fedora.linux_system_roles.rhc : Get subscription status] ***************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:10 Saturday 08 November 2025 14:22:29 -0500 (0:00:00.863) 0:01:43.749 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Call subscription-manager] *************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:23 Saturday 08 November 2025 14:22:29 -0500 (0:00:00.090) 0:01:43.840 ***** fatal: [managed-node1]: FAILED! => {} MSG: Unable to import community.general.redhat_subscription due to invalid syntax TASK [Assert registration failed] ********************************************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_proxy.yml:93 Saturday 08 November 2025 14:22:29 -0500 (0:00:00.301) 0:01:44.142 ***** ok: [managed-node1] => { "changed": false } MSG: All assertions passed TASK [Register (wrong port)] *************************************************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_proxy.yml:99 Saturday 08 November 2025 14:22:29 -0500 (0:00:00.090) 0:01:44.232 ***** TASK [fedora.linux_system_roles.rhc : Set ansible_facts required by role] ****** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:3 Saturday 08 November 2025 14:22:29 -0500 (0:00:00.082) 0:01:44.314 ***** included: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.rhc : Ensure ansible_facts used by role] ******* task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:3 Saturday 08 November 2025 14:22:29 -0500 (0:00:00.079) 0:01:44.393 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Check if system is ostree] *************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:11 Saturday 08 November 2025 14:22:30 -0500 (0:00:00.065) 0:01:44.458 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Set flag to indicate system is ostree] *** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:16 Saturday 08 November 2025 14:22:30 -0500 (0:00:00.056) 0:01:44.515 ***** skipping: [managed-node1] => { 
"changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Check if insights-packages are installed] *** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:20 Saturday 08 November 2025 14:22:30 -0500 (0:00:00.065) 0:01:44.581 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Handle insights unregistration] ********** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:6 Saturday 08 November 2025 14:22:30 -0500 (0:00:00.074) 0:01:44.655 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Handle system subscription] ************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:15 Saturday 08 November 2025 14:22:30 -0500 (0:00:00.055) 0:01:44.711 ***** included: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml for managed-node1 TASK [fedora.linux_system_roles.rhc : Ensure required packages are installed] *** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:3 Saturday 08 November 2025 14:22:30 -0500 (0:00:00.072) 0:01:44.783 ***** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [ "subscription-manager-1.24.54-1.el7.centos.x86_64 providing subscription-manager is already installed" ] } lsrpackages: subscription-manager TASK [fedora.linux_system_roles.rhc : Get subscription status] ***************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:10 Saturday 08 November 2025 14:22:31 -0500 (0:00:00.891) 0:01:45.675 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Call subscription-manager] *************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:23 Saturday 08 November 2025 14:22:31 -0500 (0:00:00.087) 0:01:45.763 ***** fatal: [managed-node1]: FAILED! 
=> {} MSG: Unable to import community.general.redhat_subscription due to invalid syntax TASK [Assert registration failed] ********************************************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_proxy.yml:123 Saturday 08 November 2025 14:22:31 -0500 (0:00:00.289) 0:01:46.052 ***** ok: [managed-node1] => { "changed": false } MSG: All assertions passed TASK [Register (no authentication)] ******************************************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_proxy.yml:127 Saturday 08 November 2025 14:22:31 -0500 (0:00:00.061) 0:01:46.114 ***** TASK [fedora.linux_system_roles.rhc : Set ansible_facts required by role] ****** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:3 Saturday 08 November 2025 14:22:31 -0500 (0:00:00.239) 0:01:46.353 ***** included: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.rhc : Ensure ansible_facts used by role] ******* task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:3 Saturday 08 November 2025 14:22:32 -0500 (0:00:00.110) 0:01:46.464 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Check if system is ostree] *************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:11 Saturday 08 November 2025 14:22:32 -0500 (0:00:00.100) 0:01:46.565 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Set flag to indicate system is ostree] *** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:16 Saturday 08 November 2025 14:22:32 -0500 (0:00:00.091) 0:01:46.657 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Check if insights-packages are installed] *** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:20 Saturday 08 November 2025 14:22:32 -0500 (0:00:00.090) 0:01:46.748 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Handle insights unregistration] ********** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:6 Saturday 08 November 2025 14:22:32 -0500 (0:00:00.089) 0:01:46.837 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Handle system subscription] ************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:15 Saturday 08 November 2025 14:22:32 -0500 (0:00:00.092) 0:01:46.929 ***** included: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml for managed-node1 TASK [fedora.linux_system_roles.rhc : Ensure required packages are installed] *** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:3 Saturday 08 November 2025 14:22:32 -0500 (0:00:00.135) 0:01:47.064 ***** ok: 
[managed-node1] => { "changed": false, "rc": 0, "results": [ "subscription-manager-1.24.54-1.el7.centos.x86_64 providing subscription-manager is already installed" ] } lsrpackages: subscription-manager TASK [fedora.linux_system_roles.rhc : Get subscription status] ***************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:10 Saturday 08 November 2025 14:22:33 -0500 (0:00:00.924) 0:01:47.989 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Call subscription-manager] *************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:23 Saturday 08 November 2025 14:22:33 -0500 (0:00:00.071) 0:01:48.061 ***** fatal: [managed-node1]: FAILED! => {} MSG: Unable to import community.general.redhat_subscription due to invalid syntax TASK [Unregister] ************************************************************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_proxy.yml:353 Saturday 08 November 2025 14:22:33 -0500 (0:00:00.227) 0:01:48.289 ***** TASK [fedora.linux_system_roles.rhc : Set ansible_facts required by role] ****** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:3 Saturday 08 November 2025 14:22:34 -0500 (0:00:00.271) 0:01:48.560 ***** included: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.rhc : Ensure ansible_facts used by role] ******* task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:3 Saturday 08 November 2025 14:22:34 -0500 (0:00:00.191) 0:01:48.752 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Check if system is ostree] *************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:11 Saturday 08 November 2025 14:22:34 -0500 (0:00:00.104) 0:01:48.856 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Set flag to indicate system is ostree] *** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:16 Saturday 08 November 2025 14:22:34 -0500 (0:00:00.090) 0:01:48.946 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Check if insights-packages are installed] *** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:20 Saturday 08 November 2025 14:22:34 -0500 (0:00:00.090) 0:01:49.037 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Handle insights unregistration] ********** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:6 Saturday 08 November 2025 14:22:34 -0500 (0:00:00.093) 0:01:49.130 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Handle system subscription] ************** task path: 
/tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:15 Saturday 08 November 2025 14:22:34 -0500 (0:00:00.093) 0:01:49.224 ***** included: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml for managed-node1 TASK [fedora.linux_system_roles.rhc : Ensure required packages are installed] *** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:3 Saturday 08 November 2025 14:22:34 -0500 (0:00:00.125) 0:01:49.349 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Get subscription status] ***************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:10 Saturday 08 November 2025 14:22:34 -0500 (0:00:00.102) 0:01:49.451 ***** skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Call subscription-manager] *************** task path: /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:23 Saturday 08 November 2025 14:22:35 -0500 (0:00:00.086) 0:01:49.538 ***** fatal: [managed-node1]: FAILED! => {} MSG: Unable to import community.general.redhat_subscription due to invalid syntax PLAY RECAP ********************************************************************* managed-node1 : ok=58 changed=9 unreachable=0 failed=2 skipped=66 rescued=3 ignored=0 SYSTEM ROLES ERRORS BEGIN v1 [ { "ansible_version": "2.9.27", "end_time": "2025-11-08T19:22:27.555847Z", "host": "managed-node1", "message": "Unable to import community.general.redhat_subscription due to invalid syntax", "start_time": "2025-11-08T19:22:27.388231Z", "task_name": "Call subscription-manager", "task_path": "/tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:23" }, { "ansible_version": "2.9.27", "end_time": "2025-11-08T19:22:29.641633Z", "host": "managed-node1", "message": "Unable to import community.general.redhat_subscription due to invalid syntax", "start_time": "2025-11-08T19:22:29.384993Z", "task_name": "Call subscription-manager", "task_path": "/tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:23" }, { "ansible_version": "2.9.27", "end_time": "2025-11-08T19:22:31.512078Z", "host": "managed-node1", "message": "Unable to import community.general.redhat_subscription due to invalid syntax", "start_time": "2025-11-08T19:22:31.307843Z", "task_name": "Call subscription-manager", "task_path": "/tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:23" }, { "ansible_version": "2.9.27", "end_time": "2025-11-08T19:22:33.793691Z", "host": "managed-node1", "message": "Unable to import community.general.redhat_subscription due to invalid syntax", "start_time": "2025-11-08T19:22:33.605667Z", "task_name": "Call subscription-manager", "task_path": "/tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:23" }, { "ansible_version": "2.9.27", "end_time": "2025-11-08T19:22:35.264700Z", "host": "managed-node1", "message": "Unable to import community.general.redhat_subscription due to invalid syntax", "start_time": "2025-11-08T19:22:35.083500Z", "task_name": "Call subscription-manager", 
"task_path": "/tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:23" } ] SYSTEM ROLES ERRORS END v1 TASKS RECAP ******************************************************************** Saturday 08 November 2025 14:22:35 -0500 (0:00:00.184) 0:01:49.723 ***** =============================================================================== Start Candlepin container ---------------------------------------------- 26.39s /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:41 Install needed packages ------------------------------------------------ 19.20s /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:31 Wait for started Candlepin --------------------------------------------- 17.09s /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:108 fedora.linux_system_roles.selinux : Set an SELinux label on a port ------ 7.13s /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:134 Install packages for squid ---------------------------------------------- 6.04s /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_squid.yml:7 Update system certificates store ---------------------------------------- 3.41s /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:101 fedora.linux_system_roles.selinux : Get SELinux modules facts ----------- 2.74s /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:159 Check if the candlepin container exists --------------------------------- 2.32s /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/teardown_candlepin.yml:6 Ensure directories exist ------------------------------------------------ 2.03s /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:62 Copy Candlepin CA certificate for system -------------------------------- 1.46s /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:92 Copy product certificates ----------------------------------------------- 1.39s /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:72 Copy Candlepin CA certificate for subscription-manager ------------------ 1.27s /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:83 Restart squid ----------------------------------------------------------- 0.96s /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_squid.yml:119 fedora.linux_system_roles.rhc : Ensure required packages are installed --- 0.92s /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:3 fedora.linux_system_roles.rhc : Ensure required packages are installed --- 0.90s /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:3 fedora.linux_system_roles.rhc : Ensure required packages are installed --- 0.89s /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:3 fedora.linux_system_roles.rhc : Ensure required packages are installed --- 0.86s 
/tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:3 fedora.linux_system_roles.selinux : Install SELinux python2 tools ------- 0.85s /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:26 Get facts for external test data ---------------------------------------- 0.60s /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:9 Install GPG key for RPM repositories ------------------------------------ 0.55s /tmp/collections-bdM/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:118 -- Logs begin at Sat 2025-11-08 14:16:41 EST, end at Sat 2025-11-08 14:22:35 EST. -- Nov 08 14:20:44 managed-node1 ansible-stat[9857]: Invoked with checksum_algorithm=sha1 get_checksum=True follow=False path=/run/ostree-booted get_md5=False get_mime=True get_attributes=True Nov 08 14:20:44 managed-node1 sshd[9866]: Accepted publickey for root from 10.31.41.167 port 44666 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Nov 08 14:20:44 managed-node1 systemd-logind[506]: New session 14 of user root. -- Subject: A new session 14 has been created for user root -- Defined-By: systemd -- Support: http://lists.freedesktop.org/mailman/listinfo/systemd-devel -- Documentation: http://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A new session with the ID 14 has been created for the user root. -- -- The leading process of the session is 9866. Nov 08 14:20:44 managed-node1 systemd[1]: Started Session 14 of user root. -- Subject: Unit session-14.scope has finished start-up -- Defined-By: systemd -- Support: http://lists.freedesktop.org/mailman/listinfo/systemd-devel -- -- Unit session-14.scope has finished starting up. -- -- The start-up result is done. Nov 08 14:20:44 managed-node1 sshd[9866]: pam_unix(sshd:session): session opened for user root by (uid=0) Nov 08 14:20:44 managed-node1 sshd[9866]: Received disconnect from 10.31.41.167 port 44666:11: disconnected by user Nov 08 14:20:44 managed-node1 sshd[9866]: Disconnected from 10.31.41.167 port 44666 Nov 08 14:20:44 managed-node1 sshd[9866]: pam_unix(sshd:session): session closed for user root Nov 08 14:20:44 managed-node1 systemd-logind[506]: Removed session 14. -- Subject: Session 14 has been terminated -- Defined-By: systemd -- Support: http://lists.freedesktop.org/mailman/listinfo/systemd-devel -- Documentation: http://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A session with the ID 14 has been terminated. 
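Note on the repeated fatal error: all five entries collected under SYSTEM ROLES ERRORS above report "Unable to import community.general.redhat_subscription due to invalid syntax" from the same task. On this controller (ansible-playbook 2.9.27 running under Python 2.7) that message is what the module loader emits when the module source cannot be parsed at all, which typically means the collection ships Python-3-only syntax. A minimal sketch of that failure class, assuming, hypothetically, an f-string in the module source; this is illustrative and not the actual redhat_subscription code:

    # Run under Python 2.7 to see the failure class; under Python 3.6+
    # the same fragment parses cleanly. Hypothetical stand-in code.
    import ast

    MODULE_FRAGMENT = 'msg = f"registered {hostname}"'  # f-strings need 3.6+

    try:
        ast.parse(MODULE_FRAGMENT)
        print("parsed: interpreter is Python 3.6 or newer")
    except SyntaxError as exc:
        # Ansible 2.9 surfaces this condition as
        # "Unable to import ... due to invalid syntax"
        print("invalid syntax: %s" % exc)

A practical remedy would typically be running the play under a Python 3 interpreter or pinning a collection version still compatible with Python 2.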
Nov 08 14:20:46 managed-node1 sudo[9930]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-saxtrcvowdyrmkjtbvqpqdihkrosssmn ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629645.78-11398-101277717142113/AnsiballZ_stat.py Nov 08 14:20:46 managed-node1 sudo[9930]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:20:46 managed-node1 ansible-stat[9933]: Invoked with checksum_algorithm=sha1 get_checksum=True follow=False path=/run/ostree-booted get_md5=False get_mime=True get_attributes=True Nov 08 14:20:46 managed-node1 sudo[9930]: pam_unix(sudo:session): session closed for user root Nov 08 14:20:46 managed-node1 sudo[9982]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-wgmdqxvzshnbicwihxftxggqhmwdlheh ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629646.29-11412-255334852045750/AnsiballZ_setup.py Nov 08 14:20:46 managed-node1 sudo[9982]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:20:46 managed-node1 ansible-setup[9985]: Invoked with filter=* gather_subset=['!all', '!min', 'distribution', 'distribution_major_version'] fact_path=/etc/ansible/facts.d gather_timeout=10 Nov 08 14:20:46 managed-node1 sudo[9982]: pam_unix(sudo:session): session closed for user root Nov 08 14:20:47 managed-node1 sudo[10038]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-uuiphyfefyxsnhnrifaarutknzbsuivl ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629646.99-11432-139588058483615/AnsiballZ_lineinfile.py Nov 08 14:20:47 managed-node1 sudo[10038]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:20:47 managed-node1 ansible-lineinfile[10041]: Invoked with directory_mode=None force=None remote_src=None backrefs=False insertafter=None path=/etc/hosts owner=None follow=False validate=None group=None insertbefore=None unsafe_writes=False create=False state=present content=NOT_LOGGING_PARAMETER serole=None setype=None selevel=None regexp=.*candlepin.local line=127.0.0.1 candlepin.local src=None seuser=None delimiter=None mode=None firstmatch=False attributes=None backup=False Nov 08 14:20:47 managed-node1 sudo[10038]: pam_unix(sudo:session): session closed for user root Nov 08 14:20:47 managed-node1 sudo[10090]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-mjknxesuqnrzuwkjxdswrsrqptrgkqlo ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629647.38-11441-236005554046673/AnsiballZ_setup.py Nov 08 14:20:47 managed-node1 sudo[10090]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:20:47 managed-node1 ansible-setup[10093]: Invoked with filter=ansible_pkg_mgr gather_subset=['!all'] fact_path=/etc/ansible/facts.d gather_timeout=10 Nov 08 14:20:47 managed-node1 sudo[10090]: pam_unix(sudo:session): session closed for user root Nov 08 14:20:47 managed-node1 sudo[10123]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-ugeyxmlivkkjzmddyvelslmcjgmonynr ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629647.38-11441-236005554046673/AnsiballZ_yum.py Nov 08 14:20:47 managed-node1 sudo[10123]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:20:48 managed-node1 ansible-yum[10126]: Invoked with lock_timeout=30 update_cache=False disable_excludes=None exclude=[] allow_downgrade=False disable_gpg_check=False conf_file=None use_backend=auto state=present disablerepo=[] releasever=None 
skip_broken=False autoremove=False download_dir=None enable_plugin=[] installroot=/ install_weak_deps=True name=['podman'] download_only=False bugfix=False list=None install_repoquery=True update_only=False disable_plugin=[] enablerepo=[] security=False validate_certs=True Nov 08 14:20:52 managed-node1 dbus[509]: [system] Reloaded configuration Nov 08 14:20:52 managed-node1 setsebool[10164]: The virt_use_nfs policy boolean was changed to 1 by root Nov 08 14:20:52 managed-node1 setsebool[10164]: The virt_sandbox_use_all_caps policy boolean was changed to 1 by root Nov 08 14:20:59 managed-node1 kernel: SELinux: 2048 avtab hash slots, 113365 rules. Nov 08 14:20:59 managed-node1 kernel: SELinux: 2048 avtab hash slots, 113365 rules. Nov 08 14:20:59 managed-node1 kernel: SELinux: 8 users, 14 roles, 5054 types, 318 bools, 1 sens, 1024 cats Nov 08 14:20:59 managed-node1 kernel: SELinux: 130 classes, 113365 rules Nov 08 14:20:59 managed-node1 kernel: SELinux: Converting 2282 SID table entries... Nov 08 14:21:01 managed-node1 dbus[509]: [system] Reloaded configuration Nov 08 14:21:01 managed-node1 yum[10137]: Installed: 2:container-selinux-2.119.2-1.911c772.el7_8.noarch Nov 08 14:21:01 managed-node1 yum[10137]: Installed: slirp4netns-0.4.3-4.el7_8.x86_64 Nov 08 14:21:03 managed-node1 yum[10137]: Installed: containernetworking-plugins-0.8.3-3.el7.centos.x86_64 Nov 08 14:21:03 managed-node1 yum[10137]: Installed: libnftnl-1.0.8-3.el7.x86_64 Nov 08 14:21:03 managed-node1 systemd[1]: Reloading. Nov 08 14:21:03 managed-node1 yum[10137]: Installed: 1:nftables-0.8-14.el7.x86_64 Nov 08 14:21:03 managed-node1 yum[10137]: Installed: 2:conmon-2.0.8-1.el7.x86_64 Nov 08 14:21:04 managed-node1 yum[10137]: Installed: fuse3-libs-3.6.1-4.el7.x86_64 Nov 08 14:21:04 managed-node1 yum[10137]: Installed: fuse-overlayfs-0.7.2-6.el7_8.x86_64 Nov 08 14:21:04 managed-node1 yum[10137]: Installed: 1:containers-common-0.1.40-11.el7_8.x86_64 Nov 08 14:21:04 managed-node1 yum[10137]: Installed: protobuf-c-1.0.2-3.el7.x86_64 Nov 08 14:21:04 managed-node1 yum[10137]: Installed: libnet-1.1.6-7.el7.x86_64 Nov 08 14:21:04 managed-node1 yum[10137]: Installed: criu-3.12-2.el7.x86_64 Nov 08 14:21:04 managed-node1 yum[10137]: Installed: runc-1.0.0-70.rc10.el7_9.x86_64 Nov 08 14:21:06 managed-node1 yum[10137]: Installed: podman-1.6.4-36.el7_9.x86_64 Nov 08 14:21:06 managed-node1 sudo[10123]: pam_unix(sudo:session): session closed for user root Nov 08 14:21:06 managed-node1 sudo[10254]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-ceedqyarfhdplmnxlnkkweawjhwrpltq ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629666.62-11611-14822679607334/AnsiballZ_command.py Nov 08 14:21:06 managed-node1 sudo[10254]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:21:06 managed-node1 ansible-command[10257]: Invoked with creates=None executable=None _uses_shell=False strip_empty_ends=True _raw_params=None removes=None argv=['podman', 'ps', '-a', '--filter', 'name=candlepin'] warn=True chdir=None stdin_add_newline=True stdin=None Nov 08 14:21:07 managed-node1 kernel: TECH PREVIEW: Overlay filesystem may not be fully supported. Please review provided documentation for limitations. 
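The yum transaction above installs podman 1.6.4 and its dependencies, and the surrounding ansible-command entries show how the test drives it: the invocation above checks for an existing candlepin container, and the entries below start one. A sketch of the same two calls via Python's subprocess, with the argv lists copied verbatim from the journal:

    import subprocess

    # Mirrors the logged check: podman ps -a --filter name=candlepin
    subprocess.check_call(
        ["podman", "ps", "-a", "--filter", "name=candlepin"])

    # Mirrors the logged start: detached, privileged, with the
    # Candlepin HTTPS (8443) and HTTP (8080) ports published.
    subprocess.check_call([
        "podman", "run", "--rm", "--detach",
        "--hostname", "candlepin.local",
        "--name", "candlepin",
        "--publish", "8443:8443",
        "--publish", "8080:8080",
        "--privileged",
        "ghcr.io/candlepin/candlepin-unofficial",
    ])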
Nov 08 14:21:08 managed-node1 podman[10258]: 2025-11-08 14:21:08.822841125 -0500 EST m=+1.852210541 system refresh Nov 08 14:21:08 managed-node1 sudo[10254]: pam_unix(sudo:session): session closed for user root Nov 08 14:21:09 managed-node1 sudo[10324]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-fvxcafysnbcjltlvlpsumwqxplbmbhin ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629668.98-11636-107286923209575/AnsiballZ_command.py Nov 08 14:21:09 managed-node1 sudo[10324]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:21:09 managed-node1 ansible-command[10327]: Invoked with creates=None executable=None _uses_shell=False strip_empty_ends=True _raw_params=None removes=None argv=['podman', 'run', '--rm', '--detach', '--hostname', 'candlepin.local', '--name', 'candlepin', '--publish', '8443:8443', '--publish', '8080:8080', '--privileged', 'ghcr.io/candlepin/candlepin-unofficial'] warn=True chdir=None stdin_add_newline=True stdin=None Nov 08 14:21:34 managed-node1 podman[10328]: 2025-11-08 14:21:34.817718703 -0500 EST m=+25.616830370 image pull Nov 08 14:21:34 managed-node1 podman[10328]: 2025-11-08 14:21:34.847200893 -0500 EST m=+25.646312307 container create a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc (image=ghcr.io/candlepin/candlepin-unofficial:latest, name=candlepin) Nov 08 14:21:34 managed-node1 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Nov 08 14:21:34 managed-node1 NetworkManager[576]: <info>  [1762629694.8830] manager: (cni-podman0): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Nov 08 14:21:34 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready Nov 08 14:21:34 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready Nov 08 14:21:34 managed-node1 kernel: cni-podman0: port 1(veth538c3cdf) entered blocking state Nov 08 14:21:34 managed-node1 kernel: cni-podman0: port 1(veth538c3cdf) entered disabled state Nov 08 14:21:34 managed-node1 kernel: device veth538c3cdf entered promiscuous mode Nov 08 14:21:34 managed-node1 kernel: cni-podman0: port 1(veth538c3cdf) entered blocking state Nov 08 14:21:34 managed-node1 kernel: cni-podman0: port 1(veth538c3cdf) entered forwarding state Nov 08 14:21:34 managed-node1 NetworkManager[576]: <info>  [1762629694.9014] device (veth538c3cdf): carrier: link connected Nov 08 14:21:34 managed-node1 NetworkManager[576]: <info>  [1762629694.9017] manager: (veth538c3cdf): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Nov 08 14:21:34 managed-node1 NetworkManager[576]: <info>  [1762629694.9025] device (cni-podman0): carrier: link connected Nov 08 14:21:34 managed-node1 NetworkManager[576]: <info>  [1762629694.9025] device (cni-podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', sys-iface-state: 'external') Nov 08 14:21:34 managed-node1 NetworkManager[576]: <info>  [1762629694.9054] ifcfg-rh: add connection in-memory (95f62582-884a-4db2-a820-cae249f5fef9,"cni-podman0") Nov 08 14:21:34 managed-node1 NetworkManager[576]: <info>  [1762629694.9062] device (cni-podman0): state change: unavailable -> disconnected (reason 'connection-assumed', sys-iface-state: 'external') Nov 08 14:21:34 managed-node1 NetworkManager[576]: <info>  [1762629694.9073] device (cni-podman0): Activation: starting connection 'cni-podman0' (95f62582-884a-4db2-a820-cae249f5fef9) Nov 08 14:21:34 managed-node1 NetworkManager[576]: <info>  [1762629694.9074] device (cni-podman0): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'external') Nov 08 14:21:34 managed-node1 NetworkManager[576]: <info>  [1762629694.9077] device (cni-podman0): state change: prepare -> config (reason 'none', sys-iface-state: 'external') Nov 08 14:21:34 managed-node1 NetworkManager[576]: <info>  [1762629694.9079] device (cni-podman0): state change: config -> ip-config (reason 'none', sys-iface-state: 'external') Nov 08 14:21:34 managed-node1 NetworkManager[576]: <info>  [1762629694.9082] device (cni-podman0): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'external') Nov 08 14:21:34 managed-node1 NetworkManager[576]: <info>  [1762629694.9086] device (cni-podman0): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'external') Nov 08 14:21:34 managed-node1 NetworkManager[576]: <info>  [1762629694.9088] device (cni-podman0): state change: secondaries -> activated (reason 'none', sys-iface-state: 'external') Nov 08 14:21:34 managed-node1 NetworkManager[576]: <info>  [1762629694.9125] device (cni-podman0): Activation: successful, device activated. Nov 08 14:21:34 managed-node1 dbus[509]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' Nov 08 14:21:34 managed-node1 systemd[1]: Starting Network Manager Script Dispatcher Service... -- Subject: Unit NetworkManager-dispatcher.service has begun start-up -- Defined-By: systemd -- Support: http://lists.freedesktop.org/mailman/listinfo/systemd-devel -- -- Unit NetworkManager-dispatcher.service has begun starting up. Nov 08 14:21:34 managed-node1 dbus[509]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher' Nov 08 14:21:34 managed-node1 systemd[1]: Started Network Manager Script Dispatcher Service. -- Subject: Unit NetworkManager-dispatcher.service has finished start-up -- Defined-By: systemd -- Support: http://lists.freedesktop.org/mailman/listinfo/systemd-devel -- -- Unit NetworkManager-dispatcher.service has finished starting up. -- -- The start-up result is done. Nov 08 14:21:34 managed-node1 nm-dispatcher[10374]: req:1 'up' [cni-podman0]: new request (4 scripts) Nov 08 14:21:34 managed-node1 nm-dispatcher[10374]: req:1 'up' [cni-podman0]: start running ordered scripts... Nov 08 14:21:34 managed-node1 kernel: nf_conntrack version 0.5.0 (16384 buckets, 65536 max) Nov 08 14:21:35 managed-node1 systemd[1]: Created slice Virtual Machine and Container Slice. -- Subject: Unit machine.slice has finished start-up -- Defined-By: systemd -- Support: http://lists.freedesktop.org/mailman/listinfo/systemd-devel -- -- Unit machine.slice has finished starting up. -- -- The start-up result is done. Nov 08 14:21:35 managed-node1 systemd[1]: Started libpod-conmon-a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc.scope. -- Subject: Unit libpod-conmon-a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc.scope has finished start-up -- Defined-By: systemd -- Support: http://lists.freedesktop.org/mailman/listinfo/systemd-devel -- -- Unit libpod-conmon-a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc.scope has finished starting up. -- -- The start-up result is done. Nov 08 14:21:35 managed-node1 systemd[1]: Started libcontainer container a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc.
-- Subject: Unit libpod-a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc.scope has finished start-up -- Defined-By: systemd -- Support: http://lists.freedesktop.org/mailman/listinfo/systemd-devel -- -- Unit libpod-a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc.scope has finished starting up. -- -- The start-up result is done. Nov 08 14:21:35 managed-node1 kernel: SELinux: mount invalid. Same superblock, different security settings for (dev mqueue, type mqueue) Nov 08 14:21:35 managed-node1 podman[10328]: 2025-11-08 14:21:35.216649107 -0500 EST m=+26.015760527 container init a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc (image=ghcr.io/candlepin/candlepin-unofficial:latest, name=candlepin) Nov 08 14:21:35 managed-node1 podman[10328]: 2025-11-08 14:21:35.232242362 -0500 EST m=+26.031353989 container start a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc (image=ghcr.io/candlepin/candlepin-unofficial:latest, name=candlepin) Nov 08 14:21:35 managed-node1 sudo[10324]: pam_unix(sudo:session): session closed for user root Nov 08 14:21:35 managed-node1 systemd-rc-local-generator[10]: /etc/rc.d/rc.local is not marked executable, skipping. Nov 08 14:21:35 managed-node1 systemd-journald[19]: Received client request to flush runtime journal. Nov 08 14:21:35 managed-node1 systemd[1]: getty@tty1.service has no holdoff time, scheduling restart. Nov 08 14:21:35 managed-node1 systemd[1]: Stopped Getty on tty1. -- Subject: Unit getty@tty1.service has finished shutting down -- Defined-By: systemd -- Support: http://lists.freedesktop.org/mailman/listinfo/systemd-devel -- -- Unit getty@tty1.service has finished shutting down. Nov 08 14:21:35 managed-node1 systemd[1]: Started Getty on tty1. -- Subject: Unit getty@tty1.service has finished start-up -- Defined-By: systemd -- Support: http://lists.freedesktop.org/mailman/listinfo/systemd-devel -- -- Unit getty@tty1.service has finished starting up. -- -- The start-up result is done. 
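The journal above shows podman pulling ghcr.io/candlepin/candlepin-unofficial, creating and starting the candlepin container, and wiring up the cni-podman0 bridge and veth pair that NetworkManager then adopts as external devices. The logged podman run invocation corresponds to a task roughly like this sketch (the argv is copied from the ansible-command entry; the task name is an assumption):

  - name: Start the candlepin container
    command:
      argv:
        - podman
        - run
        - --rm
        - --detach
        - --hostname
        - candlepin.local
        - --name
        - candlepin
        - --publish
        - "8443:8443"
        - --publish
        - "8080:8080"
        - --privileged
        - ghcr.io/candlepin/candlepin-unofficial

Publishing 8443 and 8080 exposes candlepin's HTTPS API and plain-HTTP content on the host, which the later availability probe and GPG-key download rely on.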
Nov 08 14:21:35 managed-node1 sudo[10669]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-yitulymeiqnfssmozcntdeyxrfvdeydd ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629695.37-11895-177726933919828/AnsiballZ_file.py Nov 08 14:21:35 managed-node1 sudo[10669]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:21:35 managed-node1 ansible-file[10673]: Invoked with directory_mode=None force=False remote_src=None _original_basename=None path=/etc/pki/product owner=None follow=True group=None unsafe_writes=False state=directory content=NOT_LOGGING_PARAMETER serole=None selevel=None setype=None access_time=None access_time_format=%Y%m%d%H%M.%S modification_time=None regexp=None src=None seuser=None recurse=False _diff_peek=None delimiter=None mode=0755 modification_time_format=%Y%m%d%H%M.%S attributes=None backup=None Nov 08 14:21:35 managed-node1 sudo[10669]: pam_unix(sudo:session): session closed for user root Nov 08 14:21:36 managed-node1 sudo[10725]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-weopbrgxeesqajfcwsqkcetlwrvtuhdj ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629696.0-11895-129078128907536/AnsiballZ_file.py Nov 08 14:21:36 managed-node1 sudo[10725]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:21:36 managed-node1 ansible-file[10728]: Invoked with directory_mode=None force=False remote_src=None _original_basename=None path=/etc/pki/product-default owner=None follow=True group=None unsafe_writes=False state=directory content=NOT_LOGGING_PARAMETER serole=None selevel=None setype=None access_time=None access_time_format=%Y%m%d%H%M.%S modification_time=None regexp=None src=None seuser=None recurse=False _diff_peek=None delimiter=None mode=0755 modification_time_format=%Y%m%d%H%M.%S attributes=None backup=None Nov 08 14:21:36 managed-node1 sudo[10725]: pam_unix(sudo:session): session closed for user root Nov 08 14:21:37 managed-node1 sudo[10777]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-mjttrbescayycqfyjzgspznuagyoxvcf ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629696.75-11895-14888208202874/AnsiballZ_file.py Nov 08 14:21:37 managed-node1 sudo[10777]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:21:37 managed-node1 ansible-file[10780]: Invoked with directory_mode=None force=False remote_src=None _original_basename=None path=/etc/rhsm/ca owner=None follow=True group=None unsafe_writes=False state=directory content=NOT_LOGGING_PARAMETER serole=None selevel=None setype=None access_time=None access_time_format=%Y%m%d%H%M.%S modification_time=None regexp=None src=None seuser=None recurse=False _diff_peek=None delimiter=None mode=0755 modification_time_format=%Y%m%d%H%M.%S attributes=None backup=None Nov 08 14:21:37 managed-node1 sudo[10777]: pam_unix(sudo:session): session closed for user root Nov 08 14:21:37 managed-node1 sudo[10829]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-llwgwlbwexrwtqzxpdkvooevvevymnej ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629697.44-11932-257279371958103/AnsiballZ_command.py Nov 08 14:21:37 managed-node1 sudo[10829]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:21:38 managed-node1 ansible-command[10832]: Invoked with creates=None executable=None _uses_shell=False strip_empty_ends=True _raw_params=None removes=None argv=['podman', 'cp', 
'candlepin:/home/candlepin/devel/candlepin/generated_certs/7050.pem', '/etc/pki/product-default/'] warn=True chdir=None stdin_add_newline=True stdin=None Nov 08 14:21:38 managed-node1 podman[10833]: 2025-11-08 14:21:38.179572559 -0500 EST m=+0.067806318 container mount a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc (image=ghcr.io/candlepin/candlepin-unofficial:latest, name=candlepin) Nov 08 14:21:38 managed-node1 podman[10833]: 2025-11-08 14:21:38.212798106 -0500 EST m=+0.101031771 container pause a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc (image=ghcr.io/candlepin/candlepin-unofficial:latest, name=candlepin) Nov 08 14:21:38 managed-node1 podman[10833]: 2025-11-08 14:21:38.564382741 -0500 EST m=+0.452616553 container unpause a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc (image=ghcr.io/candlepin/candlepin-unofficial:latest, name=candlepin) Nov 08 14:21:38 managed-node1 podman[10833]: 2025-11-08 14:21:38.564930557 -0500 EST m=+0.453164541 container unmount a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc (image=ghcr.io/candlepin/candlepin-unofficial:latest, name=candlepin) Nov 08 14:21:38 managed-node1 sudo[10829]: pam_unix(sudo:session): session closed for user root Nov 08 14:21:39 managed-node1 sudo[10932]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-sbyhidsvapcvoqocwiatmqrmwzvcuflu ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629698.87-11955-22270090865724/AnsiballZ_command.py Nov 08 14:21:39 managed-node1 sudo[10932]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:21:39 managed-node1 ansible-command[10935]: Invoked with creates=None executable=None _uses_shell=False strip_empty_ends=True _raw_params=None removes=None argv=['podman', 'cp', 'candlepin:/etc/candlepin/certs/candlepin-ca.crt', '/etc/rhsm/ca/candlepin-ca.pem'] warn=True chdir=None stdin_add_newline=True stdin=None Nov 08 14:21:39 managed-node1 podman[10936]: 2025-11-08 14:21:39.497354351 -0500 EST m=+0.058195714 container mount a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc (image=ghcr.io/candlepin/candlepin-unofficial:latest, name=candlepin) Nov 08 14:21:39 managed-node1 podman[10936]: 2025-11-08 14:21:39.511389909 -0500 EST m=+0.072231115 container pause a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc (image=ghcr.io/candlepin/candlepin-unofficial:latest, name=candlepin) Nov 08 14:21:39 managed-node1 podman[10936]: 2025-11-08 14:21:39.852380325 -0500 EST m=+0.413221650 container unpause a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc (image=ghcr.io/candlepin/candlepin-unofficial:latest, name=candlepin) Nov 08 14:21:39 managed-node1 podman[10936]: 2025-11-08 14:21:39.853278249 -0500 EST m=+0.414119689 container unmount a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc (image=ghcr.io/candlepin/candlepin-unofficial:latest, name=candlepin) Nov 08 14:21:39 managed-node1 sudo[10932]: pam_unix(sudo:session): session closed for user root Nov 08 14:21:40 managed-node1 sudo[11035]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-djfsdnldwssdymgkwaosiuwgfgtiafnn ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629700.16-11971-48349941057095/AnsiballZ_command.py Nov 08 14:21:40 managed-node1 sudo[11035]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:21:40 managed-node1 ansible-command[11038]: Invoked with creates=None executable=None _uses_shell=False 
strip_empty_ends=True _raw_params=None removes=None argv=['podman', 'cp', 'candlepin:/etc/candlepin/certs/candlepin-ca.crt', '/etc/pki/ca-trust/source/anchors/candlepin-ca.pem'] warn=True chdir=None stdin_add_newline=True stdin=None Nov 08 14:21:40 managed-node1 podman[11039]: 2025-11-08 14:21:40.908130291 -0500 EST m=+0.092825826 container mount a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc (image=ghcr.io/candlepin/candlepin-unofficial:latest, name=candlepin) Nov 08 14:21:40 managed-node1 podman[11039]: 2025-11-08 14:21:40.930092628 -0500 EST m=+0.114787975 container pause a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc (image=ghcr.io/candlepin/candlepin-unofficial:latest, name=candlepin) Nov 08 14:21:41 managed-node1 podman[11039]: 2025-11-08 14:21:41.285405872 -0500 EST m=+0.470101313 container unpause a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc (image=ghcr.io/candlepin/candlepin-unofficial:latest, name=candlepin) Nov 08 14:21:41 managed-node1 podman[11039]: 2025-11-08 14:21:41.285959706 -0500 EST m=+0.470655333 container unmount a99b6d77852abd277ac5765870cbf5196206324e7d881325ff3e0efcb98c30fc (image=ghcr.io/candlepin/candlepin-unofficial:latest, name=candlepin) Nov 08 14:21:41 managed-node1 sudo[11035]: pam_unix(sudo:session): session closed for user root Nov 08 14:21:42 managed-node1 sudo[11138]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-fumdbejichzmpvvobcmyqtpwwsoixjtl ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629701.61-11987-171638762087806/AnsiballZ_command.py Nov 08 14:21:42 managed-node1 sudo[11138]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:21:42 managed-node1 ansible-command[11141]: Invoked with creates=None executable=None _uses_shell=False strip_empty_ends=True _raw_params=None removes=None argv=['update-ca-trust', 'extract'] warn=True chdir=None stdin_add_newline=True stdin=None Nov 08 14:21:44 managed-node1 sudo[11138]: pam_unix(sudo:session): session closed for user root Nov 08 14:21:45 managed-node1 sudo[11196]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-wquvoqdlqzkgywgwzsgxcjujmghrrdqk ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629704.99-12024-91940006305926/AnsiballZ_uri.py Nov 08 14:21:45 managed-node1 sudo[11196]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:21:45 managed-node1 ansible-uri[11199]: Invoked with directory_mode=None force=False remote_src=None status_code=[200] body_format=raw owner=None follow=False client_key=None group=None use_proxy=True headers={} unsafe_writes=False serole=None content=NOT_LOGGING_PARAMETER setype=None follow_redirects=safe return_content=False client_cert=None body=None timeout=30 src=None dest=None selevel=None force_basic_auth=False removes=None http_agent=ansible-httpget regexp=None url_password=NOT_LOGGING_PARAMETER url=https://candlepin.local:8443/candlepin validate_certs=False seuser=None method=HEAD creates=None unix_socket=None delimiter=None mode=None url_username=None attributes=None backup=None Nov 08 14:21:45 managed-node1 systemd[1]: getty@tty1.service has no holdoff time, scheduling restart. Nov 08 14:21:45 managed-node1 systemd[1]: Stopped Getty on tty1. -- Subject: Unit getty@tty1.service has finished shutting down -- Defined-By: systemd -- Support: http://lists.freedesktop.org/mailman/listinfo/systemd-devel -- -- Unit getty@tty1.service has finished shutting down. 
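This stretch distributes certificates from the running container to the host: file tasks create /etc/pki/product, /etc/pki/product-default and /etc/rhsm/ca; podman cp exports the generated product certificate 7050.pem and the candlepin CA (the mount/pause/unpause/unmount events are podman 1.6 servicing each cp); update-ca-trust extract refreshes the system trust store; and a uri HEAD request probes https://candlepin.local:8443/candlepin with validate_certs=False. A sketch consistent with those invocations (paths and parameters from the journal; task names are assumptions, the two CA copies are condensed into one looped task, and any retry logic behind the roughly 16-second probe is not visible in the log):

  - name: Create certificate directories
    file:
      path: "{{ item }}"
      state: directory
      mode: "0755"
    loop:
      - /etc/pki/product
      - /etc/pki/product-default
      - /etc/rhsm/ca

  - name: Copy the default product certificate out of the container
    command:
      argv:
        - podman
        - cp
        - candlepin:/home/candlepin/devel/candlepin/generated_certs/7050.pem
        - /etc/pki/product-default/

  - name: Install the candlepin CA for rhsm and the system trust store
    command:
      argv:
        - podman
        - cp
        - candlepin:/etc/candlepin/certs/candlepin-ca.crt
        - "{{ item }}"
    loop:
      - /etc/rhsm/ca/candlepin-ca.pem
      - /etc/pki/ca-trust/source/anchors/candlepin-ca.pem

  - name: Refresh the CA trust store
    command:
      argv:
        - update-ca-trust
        - extract

  - name: Check that candlepin answers
    uri:
      url: https://candlepin.local:8443/candlepin
      method: HEAD
      validate_certs: false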
Nov 08 14:21:45 managed-node1 systemd[1]: Started Getty on tty1. -- Subject: Unit getty@tty1.service has finished start-up -- Defined-By: systemd -- Support: http://lists.freedesktop.org/mailman/listinfo/systemd-devel -- -- Unit getty@tty1.service has finished starting up. -- -- The start-up result is done. Nov 08 14:22:01 managed-node1 sudo[11196]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:02 managed-node1 sudo[11359]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-emfwjhezdueeqwgfhgubujivnhqnbmhs ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629722.04-12600-141126453331434/AnsiballZ_get_url.py Nov 08 14:22:02 managed-node1 sudo[11359]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:02 managed-node1 ansible-get_url[11362]: Invoked with directory_mode=None force=False remote_src=None owner=None follow=False client_key=None group=None use_proxy=True unsafe_writes=False serole=None content=NOT_LOGGING_PARAMETER validate_certs=True setype=None client_cert=None timeout=10 url_password=NOT_LOGGING_PARAMETER dest=/etc/pki/rpm-gpg/RPM-GPG-KEY-candlepin selevel=None force_basic_auth=False sha256sum= http_agent=ansible-httpget regexp=None src=None url=http://candlepin.local:8080/RPM-GPG-KEY-candlepin checksum= seuser=None headers=None delimiter=None mode=0644 url_username=None attributes=None backup=None tmp_dest=None Nov 08 14:22:02 managed-node1 sudo[11359]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:02 managed-node1 sudo[11415]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-rsdofizcigpgvohlqmycfhvhqegvdijn ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629722.64-12622-277061376560157/AnsiballZ_uri.py Nov 08 14:22:02 managed-node1 sudo[11415]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:02 managed-node1 ansible-uri[11418]: Invoked with directory_mode=None force=False remote_src=None status_code=[200] body_format=raw owner=None follow=False client_key=None group=None use_proxy=True headers={} unsafe_writes=False serole=None content=NOT_LOGGING_PARAMETER setype=None follow_redirects=safe return_content=False client_cert=None body=None timeout=30 src=None dest=None selevel=None force_basic_auth=False removes=None http_agent=ansible-httpget regexp=None url_password=NOT_LOGGING_PARAMETER url=https://candlepin.local:8443/candlepin validate_certs=False seuser=None method=HEAD creates=None unix_socket=None delimiter=None mode=None url_username=None attributes=None backup=None Nov 08 14:22:02 managed-node1 sudo[11415]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:03 managed-node1 sudo[11471]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-inaxmxtbktgvqixccjliilhaukoxgdml ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629723.1-12631-178365517148732/AnsiballZ_setup.py Nov 08 14:22:03 managed-node1 sudo[11471]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:03 managed-node1 ansible-setup[11474]: Invoked with filter=ansible_pkg_mgr gather_subset=['!all'] fact_path=/etc/ansible/facts.d gather_timeout=10 Nov 08 14:22:03 managed-node1 sudo[11471]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:03 managed-node1 sudo[11504]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-urllbpuizpdcuiffmqsccvyvhiqxqpox ; /usr/bin/python 
/root/.ansible/tmp/ansible-tmp-1762629723.1-12631-178365517148732/AnsiballZ_yum.py Nov 08 14:22:03 managed-node1 sudo[11504]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:03 managed-node1 ansible-yum[11507]: Invoked with lock_timeout=30 update_cache=False disable_excludes=None exclude=[] allow_downgrade=False disable_gpg_check=False conf_file=None use_backend=auto state=present disablerepo=[] releasever=None skip_broken=False autoremove=False download_dir=None enable_plugin=[] installroot=/ install_weak_deps=True name=['squid', 'httpd-tools'] download_only=False bugfix=False list=None install_repoquery=True update_only=False disable_plugin=[] enablerepo=[] security=False validate_certs=True Nov 08 14:22:07 managed-node1 yum[11518]: Installed: apr-1.4.8-7.el7.x86_64 Nov 08 14:22:07 managed-node1 yum[11518]: Installed: apr-util-1.5.2-6.el7_9.1.x86_64 Nov 08 14:22:07 managed-node1 yum[11518]: Installed: libtool-ltdl-2.4.2-22.el7_3.x86_64 Nov 08 14:22:07 managed-node1 yum[11518]: Installed: libecap-1.0.0-1.el7.x86_64 Nov 08 14:22:07 managed-node1 yum[11518]: Installed: 7:squid-migration-script-3.5.20-17.el7_9.10.x86_64 Nov 08 14:22:07 managed-node1 yum[11518]: Installed: perl-Net-Daemon-0.48-5.el7.noarch Nov 08 14:22:07 managed-node1 yum[11518]: Installed: perl-PlRPC-0.2020-14.el7.noarch Nov 08 14:22:07 managed-node1 yum[11518]: Installed: perl-DBI-1.627-4.el7.x86_64 Nov 08 14:22:07 managed-node1 groupadd[11551]: group added to /etc/group: name=squid, GID=23 Nov 08 14:22:07 managed-node1 groupadd[11551]: group added to /etc/gshadow: name=squid Nov 08 14:22:07 managed-node1 groupadd[11551]: new group: name=squid, GID=23 Nov 08 14:22:07 managed-node1 useradd[11556]: new user: name=squid, UID=23, GID=23, home=/var/spool/squid, shell=/sbin/nologin Nov 08 14:22:08 managed-node1 systemd[1]: Reloading. 
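Next the test fetches the repository signing key over candlepin's plain-HTTP port and installs squid plus httpd-tools (the latter provides htpasswd for the authenticated-proxy case); the groupadd/useradd records are squid's package scriptlets creating its service account. A sketch with the URL, destination, mode, timeout and package list taken from the journal and task names assumed:

  - name: Fetch the candlepin GPG key
    get_url:
      url: http://candlepin.local:8080/RPM-GPG-KEY-candlepin
      dest: /etc/pki/rpm-gpg/RPM-GPG-KEY-candlepin
      mode: "0644"
      timeout: 10

  - name: Install squid and htpasswd tooling
    yum:
      name:
        - squid
        - httpd-tools
      state: present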
Nov 08 14:22:08 managed-node1 yum[11518]: Installed: 7:squid-3.5.20-17.el7_9.10.x86_64 Nov 08 14:22:08 managed-node1 yum[11518]: Installed: httpd-tools-2.4.6-99.el7.centos.1.x86_64 Nov 08 14:22:09 managed-node1 sudo[11504]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:09 managed-node1 sudo[11627]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-zjclpjbhzphnxvuzjktsujcdmkjtnuwz ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629729.14-12714-58838353250677/AnsiballZ_stat.py Nov 08 14:22:09 managed-node1 sudo[11627]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:09 managed-node1 ansible-stat[11630]: Invoked with checksum_algorithm=sha1 get_checksum=True follow=False path=/etc/squid/squid.conf.BACKUP get_md5=False get_mime=True get_attributes=True Nov 08 14:22:09 managed-node1 sudo[11627]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:09 managed-node1 sudo[11679]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-npfcnljmeiequxqndldyigdecnybyhzi ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629729.46-12734-281068120939604/AnsiballZ_copy.py Nov 08 14:22:09 managed-node1 sudo[11679]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:09 managed-node1 ansible-copy[11682]: Invoked with directory_mode=None force=True remote_src=True _original_basename=None owner=None follow=False local_follow=None group=None unsafe_writes=False setype=None content=NOT_LOGGING_PARAMETER serole=None dest=/etc/squid/squid.conf.BACKUP selevel=None regexp=None validate=None src=/etc/squid/squid.conf checksum=None seuser=None delimiter=None mode=0644 attributes=None backup=False Nov 08 14:22:09 managed-node1 sudo[11679]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:10 managed-node1 sudo[11731]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-drguuusokxctepixrxnjknjtlnrxhxwj ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629730.0-12749-194035422163166/AnsiballZ_lineinfile.py Nov 08 14:22:10 managed-node1 sudo[11731]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:10 managed-node1 ansible-lineinfile[11734]: Invoked with directory_mode=None force=None remote_src=None backrefs=False owner=None path=/etc/squid/squid.conf insertafter=None follow=False validate=None group=None insertbefore=^acl Safe_ports unsafe_writes=False create=False state=present content=NOT_LOGGING_PARAMETER serole=None setype=None selevel=None regexp=^acl SSL_ports port 8443 line=acl SSL_ports port 8443 # Candlepin src=None seuser=None delimiter=None mode=None firstmatch=True attributes=None backup=False Nov 08 14:22:10 managed-node1 sudo[11731]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:10 managed-node1 sudo[11783]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-xfuglcaoxsgwcflapbjffgyaslgimeaz ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629730.32-12761-198438798677990/AnsiballZ_lineinfile.py Nov 08 14:22:10 managed-node1 sudo[11783]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:10 managed-node1 ansible-lineinfile[11786]: Invoked with directory_mode=None force=None remote_src=None backrefs=False insertafter=None path=/etc/squid/squid.conf owner=None follow=False validate=None group=None insertbefore=None unsafe_writes=False create=False state=present 
content=NOT_LOGGING_PARAMETER serole=None setype=None selevel=None regexp=^shutdown_lifetime line=shutdown_lifetime 5 seconds src=None seuser=None delimiter=None mode=None firstmatch=False attributes=None backup=False Nov 08 14:22:10 managed-node1 sudo[11783]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:10 managed-node1 sudo[11835]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-dfzdfawodbhdvlttncrnjimaulxnmdru ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629730.62-12774-130053982831811/AnsiballZ_lineinfile.py Nov 08 14:22:10 managed-node1 sudo[11835]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:10 managed-node1 ansible-lineinfile[11838]: Invoked with directory_mode=None force=None remote_src=None backrefs=False insertafter=None path=/etc/squid/squid.conf owner=None follow=False validate=None group=None insertbefore=None unsafe_writes=False create=False state=present content=NOT_LOGGING_PARAMETER serole=None setype=None selevel=None regexp=^http_port line=http_port 3128 src=None seuser=None delimiter=None mode=None firstmatch=False attributes=None backup=False Nov 08 14:22:10 managed-node1 sudo[11835]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:11 managed-node1 sudo[11887]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-liqfntanlsapexjusyfeayvsrgkmnwyp ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629731.15-12795-92885302523271/AnsiballZ_setup.py Nov 08 14:22:11 managed-node1 sudo[11887]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:11 managed-node1 ansible-setup[11890]: Invoked with filter=ansible_service_mgr gather_subset=['!all'] fact_path=/etc/ansible/facts.d gather_timeout=10 Nov 08 14:22:11 managed-node1 sudo[11887]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:11 managed-node1 sudo[11920]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-uhrktthbvkoocszwlyfkwiqdiocghjmc ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629731.15-12795-92885302523271/AnsiballZ_systemd.py Nov 08 14:22:11 managed-node1 sudo[11920]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:11 managed-node1 ansible-systemd[11923]: Invoked with no_block=False force=None name=squid daemon_reexec=False enabled=None daemon_reload=False state=restarted masked=None scope=None user=None Nov 08 14:22:11 managed-node1 polkitd[508]: Registered Authentication Agent for unix-process:11927:33135 (system bus name :1.85 [/usr/bin/pkttyagent --notify-fd 5 --fallback], object path /org/freedesktop/PolicyKit1/AuthenticationAgent, locale en_US.UTF-8) Nov 08 14:22:11 managed-node1 systemd[1]: Starting Squid caching proxy... -- Subject: Unit squid.service has begun start-up -- Defined-By: systemd -- Support: http://lists.freedesktop.org/mailman/listinfo/systemd-devel -- -- Unit squid.service has begun starting up. Nov 08 14:22:11 managed-node1 squid[11941]: Squid Parent: will start 1 kids Nov 08 14:22:11 managed-node1 squid[11941]: Squid Parent: (squid-1) process 11943 started Nov 08 14:22:11 managed-node1 systemd[1]: Started Squid caching proxy. -- Subject: Unit squid.service has finished start-up -- Defined-By: systemd -- Support: http://lists.freedesktop.org/mailman/listinfo/systemd-devel -- -- Unit squid.service has finished starting up. -- -- The start-up result is done. 
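squid.conf is then adjusted in place: the stock file is backed up to squid.conf.BACKUP, an SSL_ports acl for 8443 is inserted ahead of the Safe_ports acls so CONNECT to candlepin is allowed, shutdown_lifetime is shortened so restarts during the test stay fast, the unauthenticated proxy port is pinned to 3128, and the service is restarted. A sketch assembled from the logged copy/lineinfile/systemd parameters (task names assumed):

  - name: Back up the stock squid.conf
    copy:
      src: /etc/squid/squid.conf
      dest: /etc/squid/squid.conf.BACKUP
      remote_src: true
      mode: "0644"

  - name: Allow CONNECT to the candlepin port
    lineinfile:
      path: /etc/squid/squid.conf
      regexp: '^acl SSL_ports port 8443'
      line: 'acl SSL_ports port 8443 # Candlepin'
      insertbefore: '^acl Safe_ports'
      firstmatch: true

  - name: Shorten squid's shutdown grace period
    lineinfile:
      path: /etc/squid/squid.conf
      regexp: '^shutdown_lifetime'
      line: shutdown_lifetime 5 seconds

  - name: Pin the unauthenticated proxy port
    lineinfile:
      path: /etc/squid/squid.conf
      regexp: '^http_port'
      line: http_port 3128

  - name: Restart squid
    systemd:
      name: squid
      state: restarted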
Nov 08 14:22:11 managed-node1 polkitd[508]: Unregistered Authentication Agent for unix-process:11927:33135 (system bus name :1.85, object path /org/freedesktop/PolicyKit1/AuthenticationAgent, locale en_US.UTF-8) (disconnected from bus) Nov 08 14:22:12 managed-node1 sudo[11920]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:12 managed-node1 sudo[11993]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-bblpjuhcsrfgxggbkfsuhzaqxpohzpiw ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629732.27-12819-20899003911477/AnsiballZ_setup.py Nov 08 14:22:12 managed-node1 sudo[11993]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:12 managed-node1 ansible-setup[11996]: Invoked with filter=* gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'os_family', 'python_version'] fact_path=/etc/ansible/facts.d gather_timeout=10 Nov 08 14:22:12 managed-node1 sudo[11993]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:12 managed-node1 sudo[12050]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-glqakimgboqntwxiurgctppqwmcelpwz ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629732.67-12837-1077651164203/AnsiballZ_stat.py Nov 08 14:22:12 managed-node1 sudo[12050]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:12 managed-node1 ansible-stat[12053]: Invoked with checksum_algorithm=sha1 get_checksum=True follow=False path=/run/ostree-booted get_md5=False get_mime=True get_attributes=True Nov 08 14:22:12 managed-node1 sudo[12050]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:13 managed-node1 sudo[12102]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-vncdwcfckmzbhypnlczhwralfekaflee ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629733.0-12848-17452636249650/AnsiballZ_stat.py Nov 08 14:22:13 managed-node1 sudo[12102]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:13 managed-node1 ansible-stat[12105]: Invoked with checksum_algorithm=sha1 get_checksum=True follow=False path=/sbin/transactional-update get_md5=False get_mime=True get_attributes=True Nov 08 14:22:13 managed-node1 sudo[12102]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:13 managed-node1 sudo[12154]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-dmkjctvdijlysjodjvjzumzarmzbptgn ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629733.33-12859-114584511352361/AnsiballZ_setup.py Nov 08 14:22:13 managed-node1 sudo[12154]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:13 managed-node1 ansible-setup[12157]: Invoked with filter=ansible_pkg_mgr gather_subset=['!all'] fact_path=/etc/ansible/facts.d gather_timeout=10 Nov 08 14:22:13 managed-node1 sudo[12154]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:13 managed-node1 sudo[12187]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-tfkudyzusijxzovvcnvpaboamnlkxrgm ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629733.33-12859-114584511352361/AnsiballZ_yum.py Nov 08 14:22:13 managed-node1 sudo[12187]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:13 managed-node1 ansible-yum[12190]: Invoked with lock_timeout=30 update_cache=False disable_excludes=None exclude=[] allow_downgrade=False disable_gpg_check=False conf_file=None 
use_backend=auto state=present disablerepo=[] releasever=None skip_broken=False autoremove=False download_dir=None enable_plugin=[] installroot=/ install_weak_deps=True name=['libselinux-python', 'policycoreutils-python'] download_only=False bugfix=False list=None install_repoquery=True update_only=False disable_plugin=[] enablerepo=[] security=False validate_certs=True Nov 08 14:22:14 managed-node1 sudo[12187]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:14 managed-node1 sudo[12249]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-dtwcbbokebffwubudgsqoahdlrhjvvhh ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629734.44-12900-181758977469715/AnsiballZ_setup.py Nov 08 14:22:14 managed-node1 sudo[12249]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:14 managed-node1 ansible-setup[12252]: Invoked with filter=ansible_selinux gather_subset=['all'] fact_path=/etc/ansible/facts.d gather_timeout=10 Nov 08 14:22:14 managed-node1 sudo[12249]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:15 managed-node1 sudo[12335]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-hpwsrgwosmjgqshohiclngtzkitgpxts ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629734.98-12909-237200010821622/AnsiballZ_command.py Nov 08 14:22:15 managed-node1 sudo[12335]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:15 managed-node1 ansible-command[12338]: Invoked with creates=None executable=None _uses_shell=False strip_empty_ends=True _raw_params=systemctl is-system-running removes=None argv=None warn=True chdir=None stdin_add_newline=True stdin=None Nov 08 14:22:15 managed-node1 sudo[12335]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:16 managed-node1 sudo[12388]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-wwlwhkxwksduroapuzihswmuhsanagrt ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629735.81-12955-39238659806405/AnsiballZ_local_seport.py Nov 08 14:22:16 managed-node1 sudo[12388]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:16 managed-node1 ansible-fedora.linux_system_roles.local_seport[12391]: Invoked with proto=tcp state=present ignore_selinux_state=False reload=True setype=squid_port_t local=True ports=['3128', '3130', '4000'] Nov 08 14:22:16 managed-node1 kernel: SELinux: 2048 avtab hash slots, 113365 rules. Nov 08 14:22:16 managed-node1 kernel: SELinux: 2048 avtab hash slots, 113365 rules. Nov 08 14:22:16 managed-node1 kernel: SELinux: 8 users, 14 roles, 5054 types, 318 bools, 1 sens, 1024 cats Nov 08 14:22:16 managed-node1 kernel: SELinux: 130 classes, 113365 rules Nov 08 14:22:16 managed-node1 kernel: SELinux: Converting 2324 SID table entries... Nov 08 14:22:18 managed-node1 dbus[509]: [system] Reloaded configuration Nov 08 14:22:18 managed-node1 kernel: SELinux: 2048 avtab hash slots, 113365 rules. Nov 08 14:22:18 managed-node1 kernel: SELinux: 2048 avtab hash slots, 113365 rules. Nov 08 14:22:18 managed-node1 kernel: SELinux: 8 users, 14 roles, 5054 types, 318 bools, 1 sens, 1024 cats Nov 08 14:22:18 managed-node1 kernel: SELinux: 130 classes, 113365 rules Nov 08 14:22:18 managed-node1 kernel: SELinux: Converting 2324 SID table entries... Nov 08 14:22:20 managed-node1 dbus[509]: [system] Reloaded configuration Nov 08 14:22:20 managed-node1 kernel: SELinux: 2048 avtab hash slots, 113365 rules. 
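Before squid can bind the extra test ports, the SELinux python bindings are installed (libselinux-python, policycoreutils-python) and ports 3128, 3130 and 4000 are labeled squid_port_t; the repeated bursts of "SELinux: ... avtab hash slots" kernel messages are the policy being rebuilt and reloaded, apparently once per port. The logged fedora.linux_system_roles.local_seport invocation maps to roughly:

  - name: Label the test proxy ports for squid
    fedora.linux_system_roles.local_seport:
      ports:
        - "3128"
        - "3130"
        - "4000"
      proto: tcp
      setype: squid_port_t
      state: present
      local: true

In the test this module is normally driven through the fedora.linux_system_roles.selinux role rather than called directly; the standalone task above only mirrors the journal entry.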
Nov 08 14:22:21 managed-node1 kernel: SELinux: 2048 avtab hash slots, 113365 rules. Nov 08 14:22:21 managed-node1 kernel: SELinux: 8 users, 14 roles, 5054 types, 318 bools, 1 sens, 1024 cats Nov 08 14:22:21 managed-node1 kernel: SELinux: 130 classes, 113365 rules Nov 08 14:22:21 managed-node1 kernel: SELinux: Converting 2324 SID table entries... Nov 08 14:22:22 managed-node1 dbus[509]: [system] Reloaded configuration Nov 08 14:22:22 managed-node1 sudo[12388]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:23 managed-node1 sudo[12452]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-vrhssheboistkxklbxffsdjmokkpeotl ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629742.94-13015-85394285991172/AnsiballZ_selinux_modules_facts.py Nov 08 14:22:23 managed-node1 sudo[12452]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:23 managed-node1 ansible-fedora.linux_system_roles.selinux_modules_facts[12455]: Invoked Nov 08 14:22:25 managed-node1 sudo[12452]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:26 managed-node1 sudo[12504]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-ymsiwntifpmcidrnotrskeuhdqjjpiqo ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629746.49-13067-212498100460227/AnsiballZ_setup.py Nov 08 14:22:26 managed-node1 sudo[12504]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:26 managed-node1 ansible-setup[12507]: Invoked with filter=ansible_pkg_mgr gather_subset=['!all'] fact_path=/etc/ansible/facts.d gather_timeout=10 Nov 08 14:22:26 managed-node1 sudo[12504]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:26 managed-node1 sudo[12537]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-mbsjxpcaaqcintsmbktzbamhrbmwozzp ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629746.49-13067-212498100460227/AnsiballZ_yum.py Nov 08 14:22:26 managed-node1 sudo[12537]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:27 managed-node1 ansible-yum[12540]: Invoked with lock_timeout=30 update_cache=False disable_excludes=None exclude=[] allow_downgrade=False disable_gpg_check=False conf_file=None use_backend=auto state=present disablerepo=[] releasever=None skip_broken=False autoremove=False download_dir=None enable_plugin=[] installroot=/ install_weak_deps=True name=['subscription-manager'] download_only=False bugfix=False list=None install_repoquery=True update_only=False disable_plugin=[] enablerepo=[] security=False validate_certs=True Nov 08 14:22:27 managed-node1 sudo[12537]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:28 managed-node1 sudo[12624]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-mcgxetwfbebahlidentrrmsyzbndolls ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629748.5-13164-35558794742178/AnsiballZ_setup.py Nov 08 14:22:28 managed-node1 sudo[12624]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:28 managed-node1 ansible-setup[12627]: Invoked with filter=ansible_pkg_mgr gather_subset=['!all'] fact_path=/etc/ansible/facts.d gather_timeout=10 Nov 08 14:22:28 managed-node1 sudo[12624]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:28 managed-node1 sudo[12657]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-thkmtapzpuvowwvtjbskhwmxcfmgafdf ; /usr/bin/python 
/root/.ansible/tmp/ansible-tmp-1762629748.5-13164-35558794742178/AnsiballZ_yum.py Nov 08 14:22:28 managed-node1 sudo[12657]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:29 managed-node1 ansible-yum[12660]: Invoked with lock_timeout=30 update_cache=False disable_excludes=None exclude=[] allow_downgrade=False disable_gpg_check=False conf_file=None use_backend=auto state=present disablerepo=[] releasever=None skip_broken=False autoremove=False download_dir=None enable_plugin=[] installroot=/ install_weak_deps=True name=['subscription-manager'] download_only=False bugfix=False list=None install_repoquery=True update_only=False disable_plugin=[] enablerepo=[] security=False validate_certs=True Nov 08 14:22:29 managed-node1 sudo[12657]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:30 managed-node1 sudo[12744]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-yottnkdltwghmgrsfyqytpbyzkroludy ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629750.4-13258-256880241483593/AnsiballZ_setup.py Nov 08 14:22:30 managed-node1 sudo[12744]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:30 managed-node1 ansible-setup[12747]: Invoked with filter=ansible_pkg_mgr gather_subset=['!all'] fact_path=/etc/ansible/facts.d gather_timeout=10 Nov 08 14:22:30 managed-node1 sudo[12744]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:30 managed-node1 sudo[12777]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-xwqslojknnbsltyzuszkgbzewxvyxdmm ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629750.4-13258-256880241483593/AnsiballZ_yum.py Nov 08 14:22:30 managed-node1 sudo[12777]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:31 managed-node1 ansible-yum[12780]: Invoked with lock_timeout=30 update_cache=False disable_excludes=None exclude=[] allow_downgrade=False disable_gpg_check=False conf_file=None use_backend=auto state=present disablerepo=[] releasever=None skip_broken=False autoremove=False download_dir=None enable_plugin=[] installroot=/ install_weak_deps=True name=['subscription-manager'] download_only=False bugfix=False list=None install_repoquery=True update_only=False disable_plugin=[] enablerepo=[] security=False validate_certs=True Nov 08 14:22:31 managed-node1 sudo[12777]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:32 managed-node1 sudo[12864]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-xvgcsuwdhlbquceyjdnlbycmyizimlxa ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629752.71-13459-222305814496271/AnsiballZ_setup.py Nov 08 14:22:32 managed-node1 sudo[12864]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:32 managed-node1 ansible-setup[12867]: Invoked with filter=ansible_pkg_mgr gather_subset=['!all'] fact_path=/etc/ansible/facts.d gather_timeout=10 Nov 08 14:22:33 managed-node1 sudo[12864]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:33 managed-node1 sudo[12897]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c echo BECOME-SUCCESS-sooljcxgazlayfpeakmgvpsjkogihael ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1762629752.71-13459-222305814496271/AnsiballZ_yum.py Nov 08 14:22:33 managed-node1 sudo[12897]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 08 14:22:33 managed-node1 ansible-yum[12900]: Invoked with lock_timeout=30 
update_cache=False disable_excludes=None exclude=[] allow_downgrade=False disable_gpg_check=False conf_file=None use_backend=auto state=present disablerepo=[] releasever=None skip_broken=False autoremove=False download_dir=None enable_plugin=[] installroot=/ install_weak_deps=True name=['subscription-manager'] download_only=False bugfix=False list=None install_repoquery=True update_only=False disable_plugin=[] enablerepo=[] security=False validate_certs=True Nov 08 14:22:33 managed-node1 sudo[12897]: pam_unix(sudo:session): session closed for user root Nov 08 14:22:35 managed-node1 sshd[12969]: Accepted publickey for root from 10.31.41.167 port 44694 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Nov 08 14:22:35 managed-node1 systemd-logind[506]: New session 15 of user root. -- Subject: A new session 15 has been created for user root -- Defined-By: systemd -- Support: http://lists.freedesktop.org/mailman/listinfo/systemd-devel -- Documentation: http://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A new session with the ID 15 has been created for the user root. -- -- The leading process of the session is 12969. Nov 08 14:22:35 managed-node1 systemd[1]: Started Session 15 of user root. -- Subject: Unit session-15.scope has finished start-up -- Defined-By: systemd -- Support: http://lists.freedesktop.org/mailman/listinfo/systemd-devel -- -- Unit session-15.scope has finished starting up. -- -- The start-up result is done. Nov 08 14:22:35 managed-node1 sshd[12969]: pam_unix(sshd:session): session opened for user root by (uid=0) Nov 08 14:22:35 managed-node1 sshd[12969]: Received disconnect from 10.31.41.167 port 44694:11: disconnected by user Nov 08 14:22:35 managed-node1 sshd[12969]: Disconnected from 10.31.41.167 port 44694 Nov 08 14:22:35 managed-node1 sshd[12969]: pam_unix(sshd:session): session closed for user root Nov 08 14:22:35 managed-node1 systemd-logind[506]: Removed session 15. -- Subject: Session 15 has been terminated -- Defined-By: systemd -- Support: http://lists.freedesktop.org/mailman/listinfo/systemd-devel -- Documentation: http://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A session with the ID 15 has been terminated. Nov 08 14:22:35 managed-node1 sshd[12980]: Accepted publickey for root from 10.31.41.167 port 44696 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Nov 08 14:22:35 managed-node1 systemd-logind[506]: New session 16 of user root. -- Subject: A new session 16 has been created for user root -- Defined-By: systemd -- Support: http://lists.freedesktop.org/mailman/listinfo/systemd-devel -- Documentation: http://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A new session with the ID 16 has been created for the user root. -- -- The leading process of the session is 12980. Nov 08 14:22:35 managed-node1 sshd[12980]: pam_unix(sshd:session): session opened for user root by (uid=0) Nov 08 14:22:35 managed-node1 systemd[1]: Started Session 16 of user root. -- Subject: Unit session-16.scope has finished start-up -- Defined-By: systemd -- Support: http://lists.freedesktop.org/mailman/listinfo/systemd-devel -- -- Unit session-16.scope has finished starting up. -- -- The start-up result is done.
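Finally, yum asserts name=['subscription-manager'] state=present four times in quick succession (14:22:27 through 14:22:33); no "Installed:" record follows any of them, so the package was already present and each run was an idempotent no-op, presumably one per include of the role's package setup. The equivalent task is just:

  - name: Ensure subscription-manager is installed
    yum:
      name: subscription-manager
      state: present

The trailing sshd records (sessions 15 and 16) are the controller opening its next SSH connections to the managed node.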